diff --git a/.flake8 b/.flake8 index 91efeb3301..6a78e690f9 100644 --- a/.flake8 +++ b/.flake8 @@ -12,4 +12,5 @@ exclude = migrations, virtualenv, ldap_config.py - api_app/analyzers_manager/migrations/* \ No newline at end of file + api_app/analyzers_manager/migrations/* + api_app/ingestors_manager/migrations/* \ No newline at end of file diff --git a/.github/CHANGELOG.md b/.github/CHANGELOG.md index 4199cefc47..8246dc6cee 100644 --- a/.github/CHANGELOG.md +++ b/.github/CHANGELOG.md @@ -2,6 +2,16 @@ [**Upgrade Guide**](https://intelowlproject.github.io/docs/IntelOwl/installation/#update-to-the-most-recent-version) +## [v6.3.0](https://github.com/intelowlproject/IntelOwl/releases/tag/v6.3.0) + +This release brings official support for ARM architecture. From now on, our Docker builds are multi-platform. You can now run IntelOwl in your favourite ARM machine smoothly, e.g. Apple Silicon Mac and Raspberry PI. + +We have few new analyzers that you can play with (in particular new Vulnerability scanners like WAD, Nuclei) and updated Abuse.Ch analyzers to allow the configuration of your API key. + +Then we have a lot of fixes and dependencies upgrades as usual. + +Happy hunting! 
+ ## [v6.2.1](https://github.com/intelowlproject/IntelOwl/releases/tag/v6.2.1) Minor fixes and dependencies upgrades diff --git a/.github/dependabot.yml b/.github/dependabot.yml index a039474196..305e802a1e 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -7,12 +7,12 @@ updates: day: "tuesday" target-branch: "develop" ignore: - # ignore all patch updates since we are using ~= - # this does not work for security updates - dependency-name: "*" update-types: ["version-update:semver-patch"] - dependency-name: "boto3" update-types: ["version-update:semver-minor"] + - dependency-name: "faker" + update-types: ["version-update:semver-minor"] - package-ecosystem: "pip" directory: "/integrations/malware_tools_analyzers/requirements" @@ -21,8 +21,6 @@ updates: day: "tuesday" target-branch: "develop" ignore: - # ignore all patch updates since we are using ~= - # this does not work for security updates - dependency-name: "*" update-types: ["version-update:semver-patch"] @@ -33,8 +31,6 @@ updates: day: "tuesday" target-branch: "develop" ignore: - # ignore all patch updates since we are using ~= - # this does not work for security updates - dependency-name: "*" update-types: [ "version-update:semver-patch" ] @@ -44,6 +40,16 @@ updates: interval: "weekly" day: "tuesday" target-branch: "develop" + ignore: + - dependency-name: "*" + update-types: [ "version-update:semver-patch" ] + + - package-ecosystem: "pip" + directory: "/integrations/nuclei_analyzer" + schedule: + interval: "weekly" + day: "tuesday" + target-branch: "develop" ignore: # ignore all patch updates since we are using ~= # this does not work for security updates @@ -57,8 +63,6 @@ updates: day: "tuesday" target-branch: "develop" ignore: - # ignore all patch updates since we are using ~= - # this does not work for security updates - dependency-name: "*" update-types: [ "version-update:semver-patch" ] @@ -78,8 +82,6 @@ updates: day: "tuesday" target-branch: "develop" ignore: - # ignore all patch 
updates since we are using ~= - # this does not work for security updates - dependency-name: "*" update-types: ["version-update:semver-patch"] @@ -90,8 +92,6 @@ updates: day: "tuesday" target-branch: "develop" ignore: - # ignore all patch updates since we are using ~= - # this does not work for security updates - dependency-name: "*" update-types: ["version-update:semver-patch"] @@ -102,8 +102,6 @@ updates: day: "tuesday" target-branch: "develop" ignore: - # ignore all patch updates since we are using ~= - # this does not work for security updates - dependency-name: "*" update-types: ["version-update:semver-patch"] @@ -113,6 +111,26 @@ updates: interval: "weekly" day: "tuesday" target-branch: "develop" + ignore: + - dependency-name: "*" + update-types: ["version-update:semver-patch"] + + - package-ecosystem: "docker" + directory: "/integrations/cyberchef" + schedule: + interval: "weekly" + day: "tuesday" + target-branch: "develop" + ignore: + - dependency-name: "*" + update-types: ["version-update:semver-patch"] + + - package-ecosystem: "docker" + directory: "/integrations/nuclei_analyzer" + schedule: + interval: "weekly" + day: "tuesday" + target-branch: "develop" ignore: # ignore all patch updates since we are using ~= # this does not work for security updates @@ -126,11 +144,19 @@ updates: day: "tuesday" target-branch: "develop" ignore: - # ignore all patch updates since we are using ~= - # this does not work for security updates - dependency-name: "*" update-types: ["version-update:semver-patch"] + - package-ecosystem: "docker" + directory: "/integrations/thug" + schedule: + interval: "weekly" + day: "tuesday" + target-branch: "develop" + ignore: + - dependency-name: "*" + update-types: [ "version-update:semver-patch" ] + - package-ecosystem: "docker" directory: "/integrations/phishing_analyzers" schedule: @@ -138,8 +164,6 @@ updates: day: "tuesday" target-branch: "develop" ignore: - # ignore all patch updates since we are using ~= - # this does not work for 
security updates - dependency-name: "*" update-types: ["version-update:semver-patch"] @@ -150,7 +174,5 @@ updates: day: "tuesday" target-branch: "develop" ignore: - # ignore all patch updates since we are using ~= - # this does not work for security updates - dependency-name: "*" update-types: ["version-update:semver-patch"] diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index d71fd46ae2..67d1fb2267 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -28,6 +28,7 @@ Please delete options that are not relevant. - [ ] If the plugin interacts with an external service, I have created an attribute called precisely `url` that contains this information. This is required for Health Checks. - [ ] If the plugin requires mocked testing, `_monkeypatch()` was used in its class to apply the necessary decorators. - [ ] I have added that raw JSON sample to the `MockUpResponse` of the `_monkeypatch()` method. This serves us to provide a valid sample for testing. +- [ ] I have inserted the copyright banner at the start of the file: ```# This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl # See the file 'LICENSE' for copying permission.``` - [ ] If external libraries/packages with restrictive licenses were used, they were added in the [Legal Notice](https://github.com/certego/IntelOwl/blob/master/.github/legal_notice.md) section. - [ ] Linters (`Black`, `Flake`, `Isort`) gave 0 errors. If you have correctly installed [pre-commit](https://intelowlproject.github.io/docs/IntelOwl/contribute/#how-to-start-setup-project-and-development-instance), it does these checks and adjustments on your behalf. - [ ] I have added tests for the feature/bug I solved (see `tests` folder). All the tests (new and old ones) gave 0 errors. @@ -38,4 +39,4 @@ Please delete options that are not relevant. ### Important Rules - If you miss to compile the Checklist properly, your PR won't be reviewed by the maintainers. 
-- Everytime you make changes to the PR and you think the work is done, you should explicitly ask for a review. After being reviewed and received a "change request", you should explicitly ask for a review again once you have made the requested changes. \ No newline at end of file +- Everytime you make changes to the PR and you think the work is done, you should explicitly ask for a review by using GitHub's reviewing system detailed [here](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/requesting-a-pull-request-review). \ No newline at end of file diff --git a/.github/release_template.md b/.github/release_template.md index bc8a70d65e..ab514efb76 100644 --- a/.github/release_template.md +++ b/.github/release_template.md @@ -21,4 +21,5 @@ WARNING: The release will be live within an hour! ```commandline published #IntelOwl vX.X.X! https://github.com/intelowlproject/IntelOwl/releases/tag/vX.X.X #ThreatIntelligence #CyberSecurity #OpenSource #OSINT #DFIR ``` -- [ ] If that was a major release or an important release, communicate the news to the marketing staff \ No newline at end of file +- [ ] If that was a major release or an important release, communicate the news to the marketing staff +- [ ] This is a good time to check for old dangling issues and clean-up the inactive ones. Same for issues solved by this release. 
\ No newline at end of file diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 88fb828e28..8a362c3258 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -44,7 +44,7 @@ jobs: fetch-depth: 2 - name: Set up Python - uses: actions/setup-python@v5.3.0 + uses: actions/setup-python@v5.4.0 with: python-version: '3.11' diff --git a/.github/workflows/pull_request_automation.yml b/.github/workflows/pull_request_automation.yml index 3367b61724..0996bcb60a 100644 --- a/.github/workflows/pull_request_automation.yml +++ b/.github/workflows/pull_request_automation.yml @@ -37,7 +37,7 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python - uses: actions/setup-python@v5.3.0 + uses: actions/setup-python@v5.4.0 with: python-version: 3.11 diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml index 6905a99563..eb5b3bc6ee 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -59,7 +59,7 @@ jobs: # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF # format to the repository Actions tab. 
- name: "Upload artifact" - uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0 + uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0 with: name: SARIF file path: results.sarif diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml new file mode 100644 index 0000000000..47c654f289 --- /dev/null +++ b/.github/workflows/stale.yml @@ -0,0 +1,23 @@ +name: "Tag stale issues and pull requests" + +on: + schedule: + - cron: "0 9 * * *" # Runs every day at 9 AM + workflow_dispatch: # Allows the workflow to be triggered manually + +jobs: + stale: + runs-on: ubuntu-latest + steps: + - uses: actions/stale@v9 + with: + repo-token: ${{ github.token }} + stale-pr-message: "This pull request has been marked as stale because it has had no activity for 10 days. If you are still working on this, please provide some updates or it will be closed in 5 days." + close-pr-message: "This pull request has been closed because it had no updates in 15 days. If you're still working on this fell free to reopen." + days-before-pr-stale: 10 + days-before-pr-close: 5 + stale-pr-label: "stale" + exempt-pr-labels: "keep-open" + operations-per-run: 100 + debug-only: false + exempt-all-milestones: true \ No newline at end of file diff --git a/README.md b/README.md index ceaf8576b9..cab204f90d 100644 --- a/README.md +++ b/README.md @@ -55,10 +55,10 @@ To know more about the project and its growth over time, you may be interested i You can see the full list of all available analyzers in the [documentation](https://intelowlproject.github.io/docs/IntelOwl/usage/#analyzers). 
-| Type | Analyzers Available | -| -------------------------------------------------- |-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| Inbuilt modules | - Static Office Document, RTF, PDF, PE File Analysis and metadata extraction
- Strings Deobfuscation and analysis ([FLOSS](https://github.com/mandiant/flare-floss), [Stringsifter](https://github.com/mandiant/stringsifter), ...)
- PE Emulation with [Qiling](https://github.com/qilingframework/qiling) and [Speakeasy](https://github.com/mandiant/speakeasy)
- PE Signature verification
- PE Capabilities Extraction ([CAPA](https://github.com/mandiant/capa))
- Javascript Emulation ([Box-js](https://github.com/CapacitorSet/box-js))
- Android Malware Analysis ([Quark-Engine](https://github.com/quark-engine/quark-engine), ...)
- SPF and DMARC Validator
- Yara (a lot of public rules are available. You can also add your own rules)
- more... | -| External services | - Abuse.ch MalwareBazaar/URLhaus/Threatfox/YARAify
- GreyNoise v2
- Intezer
- VirusTotal v3
- Crowdsec
- URLscan
- Shodan
- AlienVault OTX
- Intelligence_X
- MISP
- many more.. | +| Type | Analyzers Available | +| -------------------------------------------------- |---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| Inbuilt modules | - Static Office Document, RTF, PDF, PE, ELF, APK File Analysis and metadata extraction
- Strings Deobfuscation and analysis ([FLOSS](https://github.com/mandiant/flare-floss), [Stringsifter](https://github.com/mandiant/stringsifter), ...)
- [Yara](https://virustotal.github.io/yara/), [ClamAV](https://www.clamav.net/) (a lot of public rules are available. You can also add your own rules)
- PE Emulation with [Qiling](https://github.com/qilingframework/qiling) and [Speakeasy](https://github.com/mandiant/speakeasy)
- PE Signature verification
- PE Capabilities Extraction ([CAPA](https://github.com/mandiant/capa) and [Blint](https://github.com/owasp-dep-scan/blint))
- Javascript Emulation ([Box-js](https://github.com/CapacitorSet/box-js))
- Android Malware Analysis ([Quark-Engine](https://github.com/quark-engine/quark-engine), [Androguard](https://github.com/androguard/androguard), [Mobsf](https://github.com/MobSF/mobsfscan/), ...)
- SPF and DMARC Validator
- PCAP Analysis with [Suricata](https://github.com/OISF/suricata) and [Hfinger](https://github.com/CERT-Polska/hfinger)
- Honeyclients ([Thug](https://github.com/buffer/thug), [Selenium](https://github.com/wkeeling/selenium-wire))
- Scanners ([WAD](https://github.com/CERN-CERT/WAD), [Nuclei](https://github.com/projectdiscovery/nuclei), ...)
- more... | +| External services | - Abuse.ch MalwareBazaar/URLhaus/Threatfox/YARAify
- GreyNoise v2
- Intezer
- VirusTotal v3
- Crowdsec
- URLscan
- Shodan
- AlienVault OTX
- Intelligence_X
- MISP
- many more.. | ## Partnerships and sponsors @@ -86,7 +86,7 @@ IntelOwl was born out of Certego's Threat intelligence R&D division and is const [The Honeynet Project](https://www.honeynet.org) is a non-profit organization working on creating open source cyber security tools and sharing knowledge about cyber threats. -Thanks to Honeynet, we are hosting a public demo of the application [here](https://intelowl.honeynet.org). If you are interested, please contact a member of Honeynet to get access to the public service. +Thanks to Honeynet, we are hosting a public demo of the application [here](https://intelowl.honeynet.org). If you are interested, please contact a member of Honeynet or an IntelOwl maintainer to get access to the public service. #### Google Summer of Code GSoC logo diff --git a/api_app/analyzers_manager/file_analyzers/detectiteasy.py b/api_app/analyzers_manager/file_analyzers/detectiteasy.py index 3ac5e35e49..7ee3672e3f 100644 --- a/api_app/analyzers_manager/file_analyzers/detectiteasy.py +++ b/api_app/analyzers_manager/file_analyzers/detectiteasy.py @@ -1,60 +1,61 @@ +import json import logging -from api_app.analyzers_manager.classes import DockerBasedAnalyzer, FileAnalyzer +import die + +from api_app.analyzers_manager.classes import FileAnalyzer from tests.mock_utils import MockUpResponse logger = logging.getLogger(__name__) -class DetectItEasy(FileAnalyzer, DockerBasedAnalyzer): - name: str = "executable_analyzer" - url: str = "http://malware_tools_analyzers:4002/die" - # http request polling max number of tries - max_tries: int = 10 - # interval between http request polling (in secs) - poll_distance: int = 1 +class DetectItEasy(FileAnalyzer): def update(self): pass def run(self): - fname = str(self.filename).replace("/", "_").replace(" ", "_") - # get the file to send - binary = self.read_file_bytes() - args = [f"@{fname}", "--json"] - req_data = { - "args": args, - } - req_files = {fname: binary} - logger.info( - f"Running {self.analyzer_name} on 
{self.filename} with args: {args}" + logger.info(f"Running DIE on {self.filepath} for {self.md5}") + + json_report = die.scan_file( + self.filepath, die.ScanFlags.RESULT_AS_JSON, str(die.database_path / "db") ) - report = self._docker_run(req_data, req_files, analyzer_name=self.analyzer_name) - if not report: - self.report.errors.append("DIE did not detect the file type") - return {} - return report + + return json.loads(json_report) @staticmethod def mocked_docker_analyzer_get(*args, **kwargs): return MockUpResponse( { - "report": { - "arch": "NOEXEC", - "mode": "Unknown", - "type": "Unknown", - "detects": [ - { - "name": "Zip", - "type": "archive", - "string": "archive: Zip(2.0)[38.5%,1 file]", - "options": "38.5%,1 file", - "version": "2.0", - } - ], - "filetype": "Binary", - "endianess": "LE", - } + "detects": [ + { + "filetype": "PE64", + "parentfilepart": "Header", + "values": [ + { + "info": "Console64,console", + "name": "GNU linker ld (GNU Binutils)", + "string": "Linker: GNU linker ld (GNU Binutils)(2.28)[Console64,console]", + "type": "Linker", + "version": "2.28", + }, + { + "info": "", + "name": "MinGW", + "string": "Compiler: MinGW", + "type": "Compiler", + "version": "", + }, + { + "info": "NRV,brute", + "name": "UPX", + "string": "Packer: UPX(4.24)[NRV,brute]", + "type": "Packer", + "version": "4.24", + }, + ], + } + ] }, 200, ) diff --git a/api_app/analyzers_manager/file_analyzers/phishing/phishing_form_compiler.py b/api_app/analyzers_manager/file_analyzers/phishing/phishing_form_compiler.py index ad2fdf2095..a552fc533e 100644 --- a/api_app/analyzers_manager/file_analyzers/phishing/phishing_form_compiler.py +++ b/api_app/analyzers_manager/file_analyzers/phishing/phishing_form_compiler.py @@ -1,13 +1,13 @@ import logging from datetime import date, timedelta from typing import Dict -from urllib.parse import urlparse import requests from faker import Faker # skipcq: BAN-B410 from lxml.etree import HTMLParser # skipcq: BAN-B410 from lxml.html import 
document_fromstring from requests import HTTPError, Response +from requests.exceptions import MissingSchema from api_app.analyzers_manager.classes import FileAnalyzer from api_app.models import PythonConfig @@ -138,25 +138,33 @@ def identify_text_input(self, input_name: str) -> str: return fake_value def extract_action_attribute(self, form) -> str: - if not (form_action := form.get("action", None)): + form_action: str = form.get("action", None) + if not form_action: logger.info( f"'action' attribute not found in form. Defaulting to {self.target_site=}" ) form_action = self.target_site - - # if relative url extracted, clean it from '/' and concatenate everything - # if action was not extracted in previous step the if should not pass as it is a url - if not urlparse(form_action).netloc: + elif form_action.startswith("/"): # pure relative url logger.info(f"Found relative url in {form_action=}") + form_action = form_action.replace("/", "", 1) base_site = self.target_site + if base_site.endswith("/"): base_site = base_site[:-1] - if form_action.startswith("/"): - form_action = form_action.replace("/", "", 1) + form_action = base_site + "/" + form_action + elif ( + "." 
in form_action and "://" not in form_action + ): # found a domain (relative file names such as "login.php" should start with /) + logger.info(f"Found a domain in form action {form_action=}") + else: + base_site = self.target_site + if base_site.endswith("/"): + base_site = base_site[:-1] form_action = base_site + "/" + form_action logger.info(f"Extracted action to post data to: {form_action}") + return form_action def compile_form_field(self, form) -> dict: @@ -200,16 +208,29 @@ def perform_request_to_form(self, form) -> Response: headers = { "User-Agent": self.user_agent, } - response = requests.post( - url=dest_url, - data=params, - headers=headers, - proxies=( - {"http": self.proxy_address, "https": self.proxy_address} - if self.proxy_address - else None - ), - ) + try: + response = requests.post( + url=dest_url, + data=params, + headers=headers, + proxies=( + {"http": self.proxy_address, "https": self.proxy_address} + if self.proxy_address + else None + ), + ) + except MissingSchema: + logger.info(f"Adding default 'https://' schema to {dest_url}") + response = requests.post( + url="https://" + dest_url, + data=params, + headers=headers, + proxies=( + {"http": self.proxy_address, "https": self.proxy_address} + if self.proxy_address + else None + ), + ) logger.info(f"Request headers: {response.request.headers}") return response diff --git a/api_app/analyzers_manager/file_analyzers/thug_file.py b/api_app/analyzers_manager/file_analyzers/thug_file.py index 3d0cdd6954..9bdbe3acb3 100644 --- a/api_app/analyzers_manager/file_analyzers/thug_file.py +++ b/api_app/analyzers_manager/file_analyzers/thug_file.py @@ -8,7 +8,7 @@ class ThugFile(FileAnalyzer, DockerBasedAnalyzer): name: str = "Thug" - url: str = "http://malware_tools_analyzers:4002/thug" + url: str = "http://thug:4002/thug" # http request polling max number of tries max_tries: int = 15 # interval between http request polling (in secs) diff --git a/api_app/analyzers_manager/file_analyzers/yaraify_file_scan.py 
b/api_app/analyzers_manager/file_analyzers/yaraify_file_scan.py index a1906c49bd..e43cbd6d9d 100644 --- a/api_app/analyzers_manager/file_analyzers/yaraify_file_scan.py +++ b/api_app/analyzers_manager/file_analyzers/yaraify_file_scan.py @@ -4,7 +4,6 @@ import json import logging import time -from typing import Dict import requests @@ -27,7 +26,10 @@ class YARAifyFileScan(FileAnalyzer, YARAify): skip_noisy: bool skip_known: bool - def config(self, runtime_configuration: Dict): + def update(self) -> bool: + pass + + def config(self, runtime_configuration: dict): FileAnalyzer.config(self, runtime_configuration) self.query = "lookup_hash" YARAify.config(self, runtime_configuration) @@ -73,7 +75,9 @@ def run(self): "file": (name_to_send, file), } logger.info(f"yara file scan md5 {self.md5} sending sample for analysis") - response = requests.post(self.url, files=files_) + response = requests.post( + self.url, files=files_, headers=self.authentication_header + ) response.raise_for_status() scan_response = response.json() scan_query_status = scan_response.get("query_status") @@ -92,7 +96,9 @@ def run(self): f"task_id: {task_id}" ) data = {"query": "get_results", "task_id": task_id} - response = requests.post(self.url, json=data) + response = requests.post( + self.url, json=data, headers=self.authentication_header + ) response.raise_for_status() task_response = response.json() logger.debug(task_response) diff --git a/api_app/analyzers_manager/migrations/0144_analyzer_config_ultradns_dns.py b/api_app/analyzers_manager/migrations/0144_analyzer_config_ultradns_dns.py new file mode 100644 index 0000000000..d4ad149cb7 --- /dev/null +++ b/api_app/analyzers_manager/migrations/0144_analyzer_config_ultradns_dns.py @@ -0,0 +1,163 @@ +from django.db import migrations +from django.db.models.fields.related_descriptors import ( + ForwardManyToOneDescriptor, + ForwardOneToOneDescriptor, + ManyToManyDescriptor, + ReverseManyToOneDescriptor, + ReverseOneToOneDescriptor, +) + +plugin = { + 
"python_module": { + "health_check_schedule": None, + "update_schedule": None, + "module": "dns.dns_resolvers.ultradns_dns_resolver.UltraDNSDNSResolver", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + "name": "UltraDNS_DNS", + "description": "Retrieve current domain resolution with UltraDNS", + "disabled": False, + "soft_time_limit": 30, + "routing_key": "default", + "health_check_status": True, + "type": "observable", + "docker_based": False, + "maximum_tlp": "AMBER", + "observable_supported": ["url", "domain"], + "supported_filetypes": [], + "run_hash": False, + "run_hash_type": "", + "not_supported_filetypes": [], + "mapping_data_model": {}, + "model": "analyzers_manager.AnalyzerConfig", +} + +params = [ + { + "python_module": { + "module": "dns.dns_resolvers.ultradns_dns_resolver.UltraDNSDNSResolver", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + "name": "query_type", + "type": "str", + "description": "", + "is_secret": False, + "required": False, + } +] + +values = [ + { + "parameter": { + "python_module": { + "module": "dns.dns_resolvers.ultradns_dns_resolver.UltraDNSDNSResolver", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + "name": "query_type", + "type": "str", + "description": "", + "is_secret": False, + "required": False, + }, + "analyzer_config": "UltraDNS_DNS", + "connector_config": None, + "visualizer_config": None, + "ingestor_config": None, + "pivot_config": None, + "for_organization": False, + "value": "A", + "updated_at": "2024-12-25T11:31:43.211468Z", + "owner": None, + } +] + + +def _get_real_obj(Model, field, value): + def _get_obj(Model, other_model, value): + if isinstance(value, dict): + real_vals = {} + for key, real_val in value.items(): + real_vals[key] = _get_real_obj(other_model, key, real_val) + value = other_model.objects.get_or_create(**real_vals)[0] + # it is just the primary key serialized + else: + if isinstance(value, int): + if Model.__name__ == 
"PluginConfig": + value = other_model.objects.get(name=plugin["name"]) + else: + value = other_model.objects.get(pk=value) + else: + value = other_model.objects.get(name=value) + return value + + if ( + type(getattr(Model, field)) + in [ + ForwardManyToOneDescriptor, + ReverseManyToOneDescriptor, + ReverseOneToOneDescriptor, + ForwardOneToOneDescriptor, + ] + and value + ): + other_model = getattr(Model, field).get_queryset().model + value = _get_obj(Model, other_model, value) + elif type(getattr(Model, field)) in [ManyToManyDescriptor] and value: + other_model = getattr(Model, field).rel.model + value = [_get_obj(Model, other_model, val) for val in value] + return value + + +def _create_object(Model, data): + mtm, no_mtm = {}, {} + for field, value in data.items(): + value = _get_real_obj(Model, field, value) + if type(getattr(Model, field)) is ManyToManyDescriptor: + mtm[field] = value + else: + no_mtm[field] = value + try: + o = Model.objects.get(**no_mtm) + except Model.DoesNotExist: + o = Model(**no_mtm) + o.full_clean() + o.save() + for field, value in mtm.items(): + attribute = getattr(o, field) + if value is not None: + attribute.set(value) + return False + return True + + +def migrate(apps, schema_editor): + Parameter = apps.get_model("api_app", "Parameter") + PluginConfig = apps.get_model("api_app", "PluginConfig") + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + if not Model.objects.filter(name=plugin["name"]).exists(): + exists = _create_object(Model, plugin) + if not exists: + for param in params: + _create_object(Parameter, param) + for value in values: + _create_object(PluginConfig, value) + + +def reverse_migrate(apps, schema_editor): + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + Model.objects.get(name=plugin["name"]).delete() + + +class Migration(migrations.Migration): + atomic = False + dependencies = [ + ("api_app", "0065_job_mpnodesearch"), + ( + 
"analyzers_manager", + "0143_alter_analyzer_config_phishing_extractor_and_form_compiler", + ), + ] + + operations = [migrations.RunPython(migrate, reverse_migrate)] diff --git a/api_app/analyzers_manager/migrations/0145_analyzer_config_ultradns_malicious_detector.py b/api_app/analyzers_manager/migrations/0145_analyzer_config_ultradns_malicious_detector.py new file mode 100644 index 0000000000..027119f84d --- /dev/null +++ b/api_app/analyzers_manager/migrations/0145_analyzer_config_ultradns_malicious_detector.py @@ -0,0 +1,128 @@ +from django.db import migrations +from django.db.models.fields.related_descriptors import ( + ForwardManyToOneDescriptor, + ForwardOneToOneDescriptor, + ManyToManyDescriptor, + ReverseManyToOneDescriptor, + ReverseOneToOneDescriptor, +) + +plugin = { + "python_module": { + "health_check_schedule": None, + "update_schedule": None, + "module": "dns.dns_malicious_detectors.ultradns_malicious_detector.UltraDNSMaliciousDetector", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + "name": "UltraDNS_Malicious_Detector", + "description": "Scan if a DNS is marked malicious by UltraDNS", + "disabled": False, + "soft_time_limit": 30, + "routing_key": "default", + "health_check_status": True, + "type": "observable", + "docker_based": False, + "maximum_tlp": "AMBER", + "observable_supported": ["url", "domain"], + "supported_filetypes": [], + "run_hash": False, + "run_hash_type": "", + "not_supported_filetypes": [], + "mapping_data_model": {}, + "model": "analyzers_manager.AnalyzerConfig", +} + +params = [] + +values = [] + + +def _get_real_obj(Model, field, value): + def _get_obj(Model, other_model, value): + if isinstance(value, dict): + real_vals = {} + for key, real_val in value.items(): + real_vals[key] = _get_real_obj(other_model, key, real_val) + value = other_model.objects.get_or_create(**real_vals)[0] + # it is just the primary key serialized + else: + if isinstance(value, int): + if Model.__name__ == "PluginConfig": + 
value = other_model.objects.get(name=plugin["name"]) + else: + value = other_model.objects.get(pk=value) + else: + value = other_model.objects.get(name=value) + return value + + if ( + type(getattr(Model, field)) + in [ + ForwardManyToOneDescriptor, + ReverseManyToOneDescriptor, + ReverseOneToOneDescriptor, + ForwardOneToOneDescriptor, + ] + and value + ): + other_model = getattr(Model, field).get_queryset().model + value = _get_obj(Model, other_model, value) + elif type(getattr(Model, field)) in [ManyToManyDescriptor] and value: + other_model = getattr(Model, field).rel.model + value = [_get_obj(Model, other_model, val) for val in value] + return value + + +def _create_object(Model, data): + mtm, no_mtm = {}, {} + for field, value in data.items(): + value = _get_real_obj(Model, field, value) + if type(getattr(Model, field)) is ManyToManyDescriptor: + mtm[field] = value + else: + no_mtm[field] = value + try: + o = Model.objects.get(**no_mtm) + except Model.DoesNotExist: + o = Model(**no_mtm) + o.full_clean() + o.save() + for field, value in mtm.items(): + attribute = getattr(o, field) + if value is not None: + attribute.set(value) + return False + return True + + +def migrate(apps, schema_editor): + Parameter = apps.get_model("api_app", "Parameter") + PluginConfig = apps.get_model("api_app", "PluginConfig") + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + if not Model.objects.filter(name=plugin["name"]).exists(): + exists = _create_object(Model, plugin) + if not exists: + for param in params: + _create_object(Parameter, param) + for value in values: + _create_object(PluginConfig, value) + + +def reverse_migrate(apps, schema_editor): + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + Model.objects.get(name=plugin["name"]).delete() + + +class Migration(migrations.Migration): + atomic = False + dependencies = [ + ("api_app", "0065_job_mpnodesearch"), + ( + "analyzers_manager", + 
"0144_analyzer_config_ultradns_dns", + ), + ] + + operations = [migrations.RunPython(migrate, reverse_migrate)] diff --git a/api_app/analyzers_manager/migrations/0146_analyzer_config_wad.py b/api_app/analyzers_manager/migrations/0146_analyzer_config_wad.py new file mode 100644 index 0000000000..b9d20abf88 --- /dev/null +++ b/api_app/analyzers_manager/migrations/0146_analyzer_config_wad.py @@ -0,0 +1,128 @@ +from django.db import migrations +from django.db.models.fields.related_descriptors import ( + ForwardManyToOneDescriptor, + ForwardOneToOneDescriptor, + ManyToManyDescriptor, + ReverseManyToOneDescriptor, + ReverseOneToOneDescriptor, +) + +plugin = { + "python_module": { + "health_check_schedule": None, + "update_schedule": None, + "module": "wad.WAD", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + "name": "WAD", + "description": "[WAD](https://github.com/CERN-CERT/WAD) (Web Application Detector) lets you analyze given URL(s) and detect technologies used by web application behind that URL, from the OS and web server level, to the programming platform and frameworks, as well as server- and client-side applications, tools and libraries.", + "disabled": False, + "soft_time_limit": 60, + "routing_key": "default", + "health_check_status": True, + "type": "observable", + "docker_based": False, + "maximum_tlp": "CLEAR", + "observable_supported": ["url"], + "supported_filetypes": [], + "run_hash": False, + "run_hash_type": "", + "not_supported_filetypes": [], + "mapping_data_model": {}, + "model": "analyzers_manager.AnalyzerConfig", +} + +params = [] + +values = [] + + +def _get_real_obj(Model, field, value): + def _get_obj(Model, other_model, value): + if isinstance(value, dict): + real_vals = {} + for key, real_val in value.items(): + real_vals[key] = _get_real_obj(other_model, key, real_val) + value = other_model.objects.get_or_create(**real_vals)[0] + # it is just the primary key serialized + else: + if isinstance(value, int): + if 
Model.__name__ == "PluginConfig": + value = other_model.objects.get(name=plugin["name"]) + else: + value = other_model.objects.get(pk=value) + else: + value = other_model.objects.get(name=value) + return value + + if ( + type(getattr(Model, field)) + in [ + ForwardManyToOneDescriptor, + ReverseManyToOneDescriptor, + ReverseOneToOneDescriptor, + ForwardOneToOneDescriptor, + ] + and value + ): + other_model = getattr(Model, field).get_queryset().model + value = _get_obj(Model, other_model, value) + elif type(getattr(Model, field)) in [ManyToManyDescriptor] and value: + other_model = getattr(Model, field).rel.model + value = [_get_obj(Model, other_model, val) for val in value] + return value + + +def _create_object(Model, data): + mtm, no_mtm = {}, {} + for field, value in data.items(): + value = _get_real_obj(Model, field, value) + if type(getattr(Model, field)) is ManyToManyDescriptor: + mtm[field] = value + else: + no_mtm[field] = value + try: + o = Model.objects.get(**no_mtm) + except Model.DoesNotExist: + o = Model(**no_mtm) + o.full_clean() + o.save() + for field, value in mtm.items(): + attribute = getattr(o, field) + if value is not None: + attribute.set(value) + return False + return True + + +def migrate(apps, schema_editor): + Parameter = apps.get_model("api_app", "Parameter") + PluginConfig = apps.get_model("api_app", "PluginConfig") + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + if not Model.objects.filter(name=plugin["name"]).exists(): + exists = _create_object(Model, plugin) + if not exists: + for param in params: + _create_object(Parameter, param) + for value in values: + _create_object(PluginConfig, value) + + +def reverse_migrate(apps, schema_editor): + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + Model.objects.get(name=plugin["name"]).delete() + + +class Migration(migrations.Migration): + atomic = False + dependencies = [ + ("api_app", "0065_job_mpnodesearch"), + ( + 
"analyzers_manager", + "0145_analyzer_config_ultradns_malicious_detector", + ), + ] + + operations = [migrations.RunPython(migrate, reverse_migrate)] diff --git a/api_app/analyzers_manager/migrations/0147_alter_analyzer_config_feodo_yaraify_urlhaus_yaraify_scan.py b/api_app/analyzers_manager/migrations/0147_alter_analyzer_config_feodo_yaraify_urlhaus_yaraify_scan.py new file mode 100644 index 0000000000..df862da114 --- /dev/null +++ b/api_app/analyzers_manager/migrations/0147_alter_analyzer_config_feodo_yaraify_urlhaus_yaraify_scan.py @@ -0,0 +1,98 @@ +from django.db import migrations + + +def migrate(apps, schema_editor): + Parameter = apps.get_model("api_app", "Parameter") + PythonModule = apps.get_model("api_app", "PythonModule") + + # observables + observable_analyzers = [ + "urlhaus.URLHaus", + "yaraify.YARAify", + "feodo_tracker.Feodo_Tracker", + "threatfox.ThreatFox", + "mb_get.MB_GET", + "mb_google.MB_GOOGLE", + ] + for observable_analyzer in observable_analyzers: + module = PythonModule.objects.get( + module=observable_analyzer, + base_path="api_app.analyzers_manager.observable_analyzers", + ) + Parameter.objects.create( + name="service_api_key", + type="str", + description="Optional API key to connect to abuse.ch services.", + is_secret=True, + required=False, + python_module=module, + ) + + # files + yaraify_scan_module = PythonModule.objects.get( + module="yaraify_file_scan.YARAifyFileScan", + base_path="api_app.analyzers_manager.file_analyzers", + ) + Parameter.objects.create( + name="service_api_key", + type="str", + description="Optional API key to connect to abuse.ch services.", + is_secret=True, + required=False, + python_module=yaraify_scan_module, + ) + + +def reverse_migrate(apps, schema_editor): + Parameter = apps.get_model("api_app", "Parameter") + PythonModule = apps.get_model("api_app", "PythonModule") + + # observables + observable_analyzers = [ + "urlhaus.URLHaus", + "yaraify.YARAify", + "feodo_tracker.Feodo_Tracker", + 
"threatfox.ThreatFox", + "mb_get.MB_GET", + "mb_google.MB_GOOGLE", + ] + for observable_analyzer in observable_analyzers: + module = PythonModule.objects.get( + module=observable_analyzer, + base_path="api_app.analyzers_manager.observable_analyzers", + ) + Parameter.objects.get( + name="service_api_key", + type="str", + description="Optional API key to connect to abuse.ch services.", + is_secret=True, + required=False, + python_module=module, + ).delete() + + # files + yaraify_scan_module = PythonModule.objects.get( + module="yaraify_file_scan.YARAifyFileScan", + base_path="api_app.analyzers_manager.file_analyzers", + ) + Parameter.objects.get( + name="service_api_key", + type="str", + description="Optional API key to connect to abuse.ch services.", + is_secret=True, + required=False, + python_module=yaraify_scan_module, + ).delete() + + +class Migration(migrations.Migration): + atomic = False + dependencies = [ + ("api_app", "0065_job_mpnodesearch"), + ( + "analyzers_manager", + "0146_analyzer_config_wad", + ), + ] + + operations = [migrations.RunPython(migrate, reverse_migrate)] diff --git a/api_app/analyzers_manager/migrations/0148_analyzer_config_nuclei.py b/api_app/analyzers_manager/migrations/0148_analyzer_config_nuclei.py new file mode 100644 index 0000000000..113076c536 --- /dev/null +++ b/api_app/analyzers_manager/migrations/0148_analyzer_config_nuclei.py @@ -0,0 +1,163 @@ +from django.db import migrations +from django.db.models.fields.related_descriptors import ( + ForwardManyToOneDescriptor, + ForwardOneToOneDescriptor, + ManyToManyDescriptor, + ReverseManyToOneDescriptor, + ReverseOneToOneDescriptor, +) + +plugin = { + "python_module": { + "health_check_schedule": None, + "update_schedule": None, + "module": "nuclei.NucleiAnalyzer", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + "name": "Nuclei", + "description": "[Nuclei](https://github.com/projectdiscovery/nuclei) is a fast, customizable vulnerability scanner that leverages 
YAML-based templates to detect, rank, and address security flaws. It operates using structured templates that define specific security checks.", + "disabled": False, + "soft_time_limit": 1200, + "routing_key": "default", + "health_check_status": True, + "type": "observable", + "docker_based": True, + "maximum_tlp": "RED", + "observable_supported": ["ip", "url"], + "supported_filetypes": [], + "run_hash": False, + "run_hash_type": "", + "not_supported_filetypes": [], + "mapping_data_model": {}, + "model": "analyzers_manager.AnalyzerConfig", +} + +params = [ + { + "python_module": { + "module": "nuclei.NucleiAnalyzer", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + "name": "template_dirs", + "type": "list", + "description": "The template_dirs parameter allows you to specify a list of directories containing templates, each focusing on a particular category of vulnerabilities, exposures, or security assessments.\r\nAvailable Template Categories:\r\ncloud\r\ncode\r\ncves\r\nvulnerabilities\r\ndns\r\nfile\r\nheadless\r\nhelpers\r\nhttp\r\njavascript\r\nnetwork\r\npassive\r\nprofiles\r\nssl\r\nworkflows\r\nexposures", + "is_secret": False, + "required": False, + } +] + +values = [ + { + "parameter": { + "python_module": { + "module": "nuclei.NucleiAnalyzer", + "base_path": "api_app.analyzers_manager.observable_analyzers", + }, + "name": "template_dirs", + "type": "list", + "description": "The template_dirs parameter allows you to specify a list of directories containing templates, each focusing on a particular category of vulnerabilities, exposures, or security assessments.\r\nAvailable Template Categories:\r\ncloud\r\ncode\r\ncves\r\nvulnerabilities\r\ndns\r\nfile\r\nheadless\r\nhelpers\r\nhttp\r\njavascript\r\nnetwork\r\npassive\r\nprofiles\r\nssl\r\nworkflows\r\nexposures", + "is_secret": False, + "required": False, + }, + "analyzer_config": "Nuclei", + "connector_config": None, + "visualizer_config": None, + "ingestor_config": None, + 
"pivot_config": None, + "for_organization": False, + "value": [], + "updated_at": "2025-01-08T08:33:45.653741Z", + "owner": None, + } +] + + +def _get_real_obj(Model, field, value): + def _get_obj(Model, other_model, value): + if isinstance(value, dict): + real_vals = {} + for key, real_val in value.items(): + real_vals[key] = _get_real_obj(other_model, key, real_val) + value = other_model.objects.get_or_create(**real_vals)[0] + # it is just the primary key serialized + else: + if isinstance(value, int): + if Model.__name__ == "PluginConfig": + value = other_model.objects.get(name=plugin["name"]) + else: + value = other_model.objects.get(pk=value) + else: + value = other_model.objects.get(name=value) + return value + + if ( + type(getattr(Model, field)) + in [ + ForwardManyToOneDescriptor, + ReverseManyToOneDescriptor, + ReverseOneToOneDescriptor, + ForwardOneToOneDescriptor, + ] + and value + ): + other_model = getattr(Model, field).get_queryset().model + value = _get_obj(Model, other_model, value) + elif type(getattr(Model, field)) in [ManyToManyDescriptor] and value: + other_model = getattr(Model, field).rel.model + value = [_get_obj(Model, other_model, val) for val in value] + return value + + +def _create_object(Model, data): + mtm, no_mtm = {}, {} + for field, value in data.items(): + value = _get_real_obj(Model, field, value) + if type(getattr(Model, field)) is ManyToManyDescriptor: + mtm[field] = value + else: + no_mtm[field] = value + try: + o = Model.objects.get(**no_mtm) + except Model.DoesNotExist: + o = Model(**no_mtm) + o.full_clean() + o.save() + for field, value in mtm.items(): + attribute = getattr(o, field) + if value is not None: + attribute.set(value) + return False + return True + + +def migrate(apps, schema_editor): + Parameter = apps.get_model("api_app", "Parameter") + PluginConfig = apps.get_model("api_app", "PluginConfig") + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + if not 
Model.objects.filter(name=plugin["name"]).exists(): + exists = _create_object(Model, plugin) + if not exists: + for param in params: + _create_object(Parameter, param) + for value in values: + _create_object(PluginConfig, value) + + +def reverse_migrate(apps, schema_editor): + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + Model.objects.get(name=plugin["name"]).delete() + + +class Migration(migrations.Migration): + atomic = False + dependencies = [ + ("api_app", "0065_job_mpnodesearch"), + ( + "analyzers_manager", + "0147_alter_analyzer_config_feodo_yaraify_urlhaus_yaraify_scan", + ), + ] + + operations = [migrations.RunPython(migrate, reverse_migrate)] diff --git a/api_app/analyzers_manager/migrations/0149_alter_die_analyzer.py b/api_app/analyzers_manager/migrations/0149_alter_die_analyzer.py new file mode 100644 index 0000000000..d16cf22707 --- /dev/null +++ b/api_app/analyzers_manager/migrations/0149_alter_die_analyzer.py @@ -0,0 +1,35 @@ +from django.db import migrations + + +def migrate(apps, schema_editor): + PythonModule = apps.get_model("api_app", "PythonModule") + + pm = PythonModule.objects.get( + module="detectiteasy.DetectItEasy", + base_path="api_app.analyzers_manager.file_analyzers", + ) + pm.parameters.all().delete() + + AnalyzerConfig = apps.get_model("analyzers_manager", "AnalyzerConfig") + + ac = AnalyzerConfig.objects.get( + name="DetectItEasy", + ) + ac.docker_based = False + ac.save() + + +def reverse_migrate(apps, schema_editor): ... 
+ + +class Migration(migrations.Migration): + dependencies = [ + ("api_app", "0065_job_mpnodesearch"), + ( + "analyzers_manager", + "0148_analyzer_config_nuclei", + ), + ] + operations = [ + migrations.RunPython(migrate, reverse_migrate), + ] diff --git a/api_app/analyzers_manager/observable_analyzers/ailtyposquatting.py b/api_app/analyzers_manager/observable_analyzers/ailtyposquatting.py index 0b79815813..c8981ba401 100644 --- a/api_app/analyzers_manager/observable_analyzers/ailtyposquatting.py +++ b/api_app/analyzers_manager/observable_analyzers/ailtyposquatting.py @@ -1,3 +1,5 @@ +# This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl +# See the file 'LICENSE' for copying permission. import logging import math diff --git a/api_app/analyzers_manager/observable_analyzers/apivoid.py b/api_app/analyzers_manager/observable_analyzers/apivoid.py index 1bc8e2dcc6..f7548844b1 100644 --- a/api_app/analyzers_manager/observable_analyzers/apivoid.py +++ b/api_app/analyzers_manager/observable_analyzers/apivoid.py @@ -1,9 +1,12 @@ # flake8: noqa -# done for the mocked respose, +# done for the mocked response, # everything else is linted and tested +# This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl +# See the file 'LICENSE' for copying permission. 
import requests from api_app.analyzers_manager import classes +from api_app.analyzers_manager.exceptions import AnalyzerConfigurationException from tests.mock_utils import MockUpResponse, if_mock_connections, patch @@ -16,27 +19,18 @@ def update(self): def run(self): if self.observable_classification == self.ObservableTypes.DOMAIN.value: - url = ( - self.url - + f"""/domainbl/v1/pay-as-you-go/ - ?key={self._api_key} - &host={self.observable_name}""" - ) + path = "domainbl" + parameter = "host" elif self.observable_classification == self.ObservableTypes.IP.value: - url = ( - self.url - + f"""/iprep/v1/pay-as-you-go/ - ?key={self._api_key} - &ip={self.observable_name}""" - ) + path = "iprep" + parameter = "ip" elif self.observable_classification == self.ObservableTypes.URL.value: - url = ( - self.url - + f"""/urlrep/v1/pay-as-you-go/ - ?key={self._api_key} - &url={self.observable_name}""" - ) - r = requests.get(url) + path = "urlrep" + parameter = "url" + else: + raise AnalyzerConfigurationException("not supported") + complete_url = f"{self.url}/{path}/v1/pay-as-you-go/?key={self._api_key}&{parameter}={self.observable_name}" + r = requests.get(complete_url) r.raise_for_status() return r.json() diff --git a/api_app/analyzers_manager/observable_analyzers/basic_observable_analyzer.py b/api_app/analyzers_manager/observable_analyzers/basic_observable_analyzer.py index 3b938790d1..30ad4be730 100644 --- a/api_app/analyzers_manager/observable_analyzers/basic_observable_analyzer.py +++ b/api_app/analyzers_manager/observable_analyzers/basic_observable_analyzer.py @@ -1,3 +1,5 @@ +# This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl +# See the file 'LICENSE' for copying permission. 
import base64 import logging from tempfile import NamedTemporaryFile diff --git a/api_app/analyzers_manager/observable_analyzers/dns/dns_malicious_detectors/ultradns_malicious_detector.py b/api_app/analyzers_manager/observable_analyzers/dns/dns_malicious_detectors/ultradns_malicious_detector.py new file mode 100644 index 0000000000..e1c3dc9155 --- /dev/null +++ b/api_app/analyzers_manager/observable_analyzers/dns/dns_malicious_detectors/ultradns_malicious_detector.py @@ -0,0 +1,57 @@ +import ipaddress +from urllib.parse import urlparse + +import dns.resolver + +from api_app.analyzers_manager import classes +from api_app.analyzers_manager.exceptions import AnalyzerRunException + +from ..dns_responses import malicious_detector_response + + +class UltraDNSMaliciousDetector(classes.ObservableAnalyzer): + """Resolve a DNS query with UltraDNS servers, + if the response falls within the sinkhole range, the domain is malicious. + """ + + def update(self) -> bool: + pass + + def run(self): + is_malicious = False + observable = self.observable_name + + # for URLs we are checking the relative domain + if self.observable_classification == self.ObservableTypes.URL: + observable = urlparse(self.observable_name).hostname + + # Configure resolver with both nameservers + resolver = dns.resolver.Resolver() + resolver.nameservers = ["156.154.70.2", "156.154.71.2"] + resolver.timeout = 10 # Time per server + resolver.lifetime = 20 # Total time for all attempts + + sinkhole_range = ipaddress.ip_network("156.154.112.0/23") + + try: + answers = resolver.resolve(observable, "A") + for rdata in answers: + resolution = rdata.to_text() + # Check if the resolution falls in the sinkhole range + if ipaddress.ip_address(resolution) in sinkhole_range: + is_malicious = True + break + + except dns.exception.Timeout: + raise AnalyzerRunException( + "Connection to UltraDNS failed - both servers timed out" + ) + except Exception as e: + raise Exception(f"DNS query failed: {e}") + + return 
malicious_detector_response(self.observable_name, is_malicious) + + @classmethod + def _monkeypatch(cls): + patches = [] + return super()._monkeypatch(patches=patches) diff --git a/api_app/analyzers_manager/observable_analyzers/dns/dns_resolvers/ultradns_dns_resolver.py b/api_app/analyzers_manager/observable_analyzers/dns/dns_resolvers/ultradns_dns_resolver.py new file mode 100644 index 0000000000..40dccb79cd --- /dev/null +++ b/api_app/analyzers_manager/observable_analyzers/dns/dns_resolvers/ultradns_dns_resolver.py @@ -0,0 +1,64 @@ +# This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl +# See the file 'LICENSE' for copying permission. + +"""UltraDNS resolver implementation""" + +import logging +from urllib.parse import urlparse + +import dns.resolver + +from api_app.analyzers_manager import classes + +from ..dns_responses import dns_resolver_response + +logger = logging.getLogger(__name__) + + +class UltraDNSDNSResolver(classes.ObservableAnalyzer): + """Resolve a DNS query with UltraDNS servers""" + + query_type: str + + def update(self) -> bool: + pass + + def run(self): + + resolutions = [] + observable = self.observable_name + if self.observable_classification == self.ObservableTypes.URL: + observable = urlparse(self.observable_name).hostname + resolver = dns.resolver.Resolver() + + # Configure UltraDNS servers + resolver.nameservers = ["64.6.64.6", "64.6.65.6"] + resolver.timeout = 10 + resolver.lifetime = 20 + + try: + dns_resolutions = resolver.resolve(observable, self.query_type) + for resolution in dns_resolutions: + element = { + "TTL": dns_resolutions.rrset.ttl, + "data": resolution.to_text(), + "name": dns_resolutions.qname.to_text(), + "type": dns_resolutions.rdtype, + } + resolutions.append(element) + except ( + dns.resolver.NXDOMAIN, + dns.resolver.NoAnswer, + dns.resolver.NoNameservers, + ): + logger.info( + "No resolution for " + f"{self.observable_classification} {self.observable_name}" + ) + + return 
dns_resolver_response(self.observable_name, resolutions) + + @classmethod + def _monkeypatch(cls): + patches = [] + return super()._monkeypatch(patches=patches) diff --git a/api_app/analyzers_manager/observable_analyzers/feodo_tracker.py b/api_app/analyzers_manager/observable_analyzers/feodo_tracker.py index c7369f2d45..99a5a016c3 100644 --- a/api_app/analyzers_manager/observable_analyzers/feodo_tracker.py +++ b/api_app/analyzers_manager/observable_analyzers/feodo_tracker.py @@ -11,12 +11,14 @@ from api_app.analyzers_manager import classes from api_app.analyzers_manager.exceptions import AnalyzerRunException +from api_app.mixins import AbuseCHMixin +from api_app.models import PluginConfig from tests.mock_utils import MockUpResponse, if_mock_connections, patch logger = logging.getLogger(__name__) -class Feodo_Tracker(classes.ObservableAnalyzer): +class Feodo_Tracker(AbuseCHMixin, classes.ObservableAnalyzer): """ Feodo Tracker offers various blocklists, helping network owners to protect their @@ -65,6 +67,22 @@ def run(self): raise AnalyzerRunException(f"Key error in run: {e}") return result + # this is necessary because during the "update()" flow the config() + # method is not called and the attributes would not be accessible by "cls" + @classmethod + def get_service_auth_headers(cls) -> {}: + for plugin in PluginConfig.objects.filter( + parameter__python_module=cls.python_module, + parameter__is_secret=True, + parameter__name="service_api_key", + ): + if plugin.value: + logger.debug("Found auth key for feodo tracker update") + return {"Auth-Key": plugin.value} + + logger.debug("Not found auth key for feodo tracker update") + return {} + @classmethod def update(cls) -> bool: """ @@ -74,7 +92,7 @@ def update(cls) -> bool: logger.info(f"starting download of db from {db_url}") try: - r = requests.get(db_url) + r = requests.get(db_url, headers=cls.get_service_auth_headers()) r.raise_for_status() except requests.RequestException: return False diff --git 
a/api_app/analyzers_manager/observable_analyzers/mb_get.py b/api_app/analyzers_manager/observable_analyzers/mb_get.py index 8d9dc5d758..fbd1c37de5 100644 --- a/api_app/analyzers_manager/observable_analyzers/mb_get.py +++ b/api_app/analyzers_manager/observable_analyzers/mb_get.py @@ -1,27 +1,40 @@ # This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl # See the file 'LICENSE' for copying permission. +import logging import requests from api_app.analyzers_manager import classes +from api_app.mixins import AbuseCHMixin from tests.mock_utils import MockUpResponse, if_mock_connections, patch +logger = logging.getLogger(__name__) -class MB_GET(classes.ObservableAnalyzer): + +class MB_GET(AbuseCHMixin, classes.ObservableAnalyzer): url: str = "https://mb-api.abuse.ch/api/v1/" sample_url: str = "https://bazaar.abuse.ch/sample/" + def update(self) -> bool: + pass + def run(self): - return self.query_mb_api(observable_name=self.observable_name) + return self.query_mb_api( + observable_name=self.observable_name, + headers=self.authentication_header, + ) @classmethod - def query_mb_api(cls, observable_name: str) -> dict: + def query_mb_api(cls, observable_name: str, headers: dict = None) -> dict: """ This is in a ``classmethod`` so it can be reused in ``MB_GOOGLE``. """ post_data = {"query": "get_info", "hash": observable_name} - response = requests.post(cls.url, data=post_data) + if headers is None: + headers = {} + + response = requests.post(cls.url, data=post_data, headers=headers) response.raise_for_status() result = response.json() diff --git a/api_app/analyzers_manager/observable_analyzers/mb_google.py b/api_app/analyzers_manager/observable_analyzers/mb_google.py index de785d2d41..71cbfc9ef4 100644 --- a/api_app/analyzers_manager/observable_analyzers/mb_google.py +++ b/api_app/analyzers_manager/observable_analyzers/mb_google.py @@ -11,13 +11,19 @@ class MB_GOOGLE(MB_GET): This is a modified version of MB_GET. 
""" + def update(self) -> bool: + pass + def run(self): results = {} query = f"{self.observable_name} site:bazaar.abuse.ch" for url in googlesearch.search(query, stop=20): mb_hash = url.split("/")[-2] - res = super().query_mb_api(observable_name=mb_hash) + res = super().query_mb_api( + observable_name=mb_hash, + headers=self.authentication_header, + ) results[mb_hash] = res return results diff --git a/api_app/analyzers_manager/observable_analyzers/nuclei.py b/api_app/analyzers_manager/observable_analyzers/nuclei.py new file mode 100644 index 0000000000..1fc6b780ed --- /dev/null +++ b/api_app/analyzers_manager/observable_analyzers/nuclei.py @@ -0,0 +1,60 @@ +# This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl +# See the file 'LICENSE' for copying permission. +import logging + +from api_app.analyzers_manager.classes import DockerBasedAnalyzer, ObservableAnalyzer + +logger = logging.getLogger(__name__) + + +class NucleiAnalyzer(ObservableAnalyzer, DockerBasedAnalyzer): + url: str = "http://nuclei_analyzer:4008/run-nuclei" + template_dirs: list + max_tries: int = 40 + poll_distance: int = 30 + + @classmethod + def update(cls) -> bool: + pass + + def run(self): + """ + Prepares and executes a Nuclei scan through the Docker-based API. 
+ """ + VALID_TEMPLATE_CATEGORIES = { + "cloud", + "code", + "cves", + "vulnerabilities", + "dns", + "file", + "headless", + "helpers", + "http", + "javascript", + "network", + "passive", + "profiles", + "ssl", + "workflows", + "exposures", + } + + args = [self.observable_name] + + # Append valid template directories with the "-t" flag + for template_dir in self.template_dirs: + if template_dir in VALID_TEMPLATE_CATEGORIES: + args.extend(["-t", template_dir]) + else: + warning = f"Skipping invalid template directory: {template_dir} for observable {self.observable_name}" + logger.warning(warning) + self.report.errors.append(warning) + req_data = {"args": args} + + # Execute the request + response = self._docker_run(req_data=req_data, req_files=None) + + analysis = response.get("data", []) + + return analysis diff --git a/api_app/analyzers_manager/observable_analyzers/threatfox.py b/api_app/analyzers_manager/observable_analyzers/threatfox.py index 512a1e6960..c5af095ce9 100644 --- a/api_app/analyzers_manager/observable_analyzers/threatfox.py +++ b/api_app/analyzers_manager/observable_analyzers/threatfox.py @@ -2,14 +2,18 @@ # See the file 'LICENSE' for copying permission. 
import json +import logging import requests from api_app.analyzers_manager import classes +from api_app.mixins import AbuseCHMixin from tests.mock_utils import MockUpResponse, if_mock_connections, patch +logger = logging.getLogger(__name__) -class ThreatFox(classes.ObservableAnalyzer): + +class ThreatFox(AbuseCHMixin, classes.ObservableAnalyzer): url: str = "https://threatfox-api.abuse.ch/api/v1/" disable: bool = False # optional @@ -22,7 +26,11 @@ def run(self): payload = {"query": "search_ioc", "search_term": self.observable_name} - response = requests.post(self.url, data=json.dumps(payload)) + response = requests.post( + self.url, + data=json.dumps(payload), + headers=self.authentication_header, + ) response.raise_for_status() result = response.json() diff --git a/api_app/analyzers_manager/observable_analyzers/thug_url.py b/api_app/analyzers_manager/observable_analyzers/thug_url.py index 3e64b56c1e..aa04ea743c 100644 --- a/api_app/analyzers_manager/observable_analyzers/thug_url.py +++ b/api_app/analyzers_manager/observable_analyzers/thug_url.py @@ -8,7 +8,7 @@ class ThugUrl(ObservableAnalyzer, DockerBasedAnalyzer): name: str = "Thug" - url: str = "http://malware_tools_analyzers:4002/thug" + url: str = "http://thug:4002/thug" # http request polling max number of tries max_tries: int = 15 # interval between http request polling (in seconds) @@ -23,6 +23,11 @@ class ThugUrl(ObservableAnalyzer, DockerBasedAnalyzer): def _thug_args_builder(self): user_agent = self.user_agent + if not user_agent: + user_agent = ( + "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 " + "(KHTML, like Gecko) Chrome/133.0.0.0 Safari/537.36 Edg/131.0.2903.86" + ) dom_events = self.dom_events use_proxy = self.use_proxy proxy = self.proxy @@ -48,6 +53,7 @@ def run(self): tmp_dir = secrets.token_hex(4) tmp_dir_full_path = "/opt/deploy/thug" + tmp_dir # make request data + # the option -n is bugged and does not work https://github.com/intelowlproject/IntelOwl/issues/2656 
args.extend(["-n", tmp_dir_full_path, self.observable_name]) req_data = { diff --git a/api_app/analyzers_manager/observable_analyzers/urlhaus.py b/api_app/analyzers_manager/observable_analyzers/urlhaus.py index 94d79566cf..97fd172313 100644 --- a/api_app/analyzers_manager/observable_analyzers/urlhaus.py +++ b/api_app/analyzers_manager/observable_analyzers/urlhaus.py @@ -1,14 +1,18 @@ # This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl # See the file 'LICENSE' for copying permission. +import logging import requests -from api_app.analyzers_manager import classes +from api_app.analyzers_manager.classes import ObservableAnalyzer from api_app.analyzers_manager.exceptions import AnalyzerRunException +from api_app.mixins import AbuseCHMixin from tests.mock_utils import MockUpResponse, if_mock_connections, patch +logger = logging.getLogger(__name__) -class URLHaus(classes.ObservableAnalyzer): + +class URLHaus(AbuseCHMixin, ObservableAnalyzer): url = "https://urlhaus-api.abuse.ch/v1/" disable: bool = False # optional @@ -34,7 +38,11 @@ def run(self): f"not supported observable type {self.observable_classification}." ) - response = requests.post(self.url + uri, data=post_data, headers=headers) + response = requests.post( + self.url + uri, + data=post_data, + headers=self.authentication_header | headers, + ) response.raise_for_status() return response.json() diff --git a/api_app/analyzers_manager/observable_analyzers/wad.py b/api_app/analyzers_manager/observable_analyzers/wad.py new file mode 100644 index 0000000000..7fbbe065f8 --- /dev/null +++ b/api_app/analyzers_manager/observable_analyzers/wad.py @@ -0,0 +1,55 @@ +# This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl +# See the file 'LICENSE' for copying permission. 
+ +import logging + +from wad.detection import Detector + +from api_app.analyzers_manager import classes +from api_app.analyzers_manager.exceptions import AnalyzerRunException +from tests.mock_utils import if_mock_connections, patch + +logger = logging.getLogger(__name__) + + +class WAD(classes.ObservableAnalyzer): + """ + This analyzer is a wrapper for the WAD (Web Application Detector) project. + """ + + @classmethod + def update(cls) -> bool: + pass + + def run(self): + logger.info(f"Running WAD Analyzer for {self.observable_name}") + + detector = Detector() + + results = detector.detect(url=self.observable_name) + + if results: + return results + else: + raise AnalyzerRunException("no results returned for the provided url") + + @classmethod + def _monkeypatch(cls): + patches = [ + if_mock_connections( + patch.object( + Detector, + "detect", + return_value={ + "https://www.google.com/": [ + { + "app": "Google Web Server", + "ver": "null", + "type": "Web Servers", + } + ] + }, + ), + ) + ] + return super()._monkeypatch(patches=patches) diff --git a/api_app/analyzers_manager/observable_analyzers/yaraify.py b/api_app/analyzers_manager/observable_analyzers/yaraify.py index 7a0f0bbce4..a1191d8ec7 100644 --- a/api_app/analyzers_manager/observable_analyzers/yaraify.py +++ b/api_app/analyzers_manager/observable_analyzers/yaraify.py @@ -1,19 +1,26 @@ # This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl # See the file 'LICENSE' for copying permission. 
+import logging import requests from api_app.analyzers_manager.classes import ObservableAnalyzer +from api_app.mixins import AbuseCHMixin from tests.mock_utils import MockUpResponse, if_mock_connections, patch +logger = logging.getLogger(__name__) -class YARAify(ObservableAnalyzer): + +class YARAify(AbuseCHMixin, ObservableAnalyzer): url: str = "https://yaraify-api.abuse.ch/api/v1/" query: str result_max: int _api_key_name: str + def update(self) -> bool: + pass + def run(self): data = {"search_term": self.observable_name, "query": self.query} @@ -23,7 +30,9 @@ def run(self): if getattr(self, "_api_key_name", None): data["malpedia-token"] = self._api_key_name - response = requests.post(self.url, json=data) + response = requests.post( + self.url, json=data, headers=self.authentication_header + ) response.raise_for_status() result = response.json() diff --git a/api_app/ingestors_manager/ingestors/greedybear.py b/api_app/ingestors_manager/ingestors/greedybear.py new file mode 100644 index 0000000000..5eced50fa1 --- /dev/null +++ b/api_app/ingestors_manager/ingestors/greedybear.py @@ -0,0 +1,100 @@ +import ipaddress +import logging +from typing import Any, Iterable +from unittest.mock import patch + +import requests + +from api_app.ingestors_manager.classes import Ingestor +from api_app.ingestors_manager.exceptions import ( + IngestorConfigurationException, + IngestorRunException, +) +from tests.mock_utils import MockUpResponse, if_mock_connections + +logger = logging.getLogger(__name__) + + +class GreedyBear(Ingestor): + + url: str + feed_type: str + attack_type: str + age: str + + VALID_FEED_TYPES = {"log4j", "cowrie", "all"} + VALID_ATTACK_TYPES = {"scanner", "payload_request", "all"} + VALID_AGE = {"recent", "persistent"} + + @classmethod + def update(cls) -> bool: + pass + + def run(self) -> Iterable[Any]: + if self.feed_type not in self.VALID_FEED_TYPES: + raise IngestorConfigurationException( + f"Invalid feed_type: {self.feed_type}. 
Must be one of {self.VALID_FEED_TYPES}" + ) + if self.attack_type not in self.VALID_ATTACK_TYPES: + raise IngestorConfigurationException( + f"Invalid attack_type: {self.attack_type}. Must be one of {self.VALID_ATTACK_TYPES}" + ) + if self.age not in self.VALID_AGE: + raise IngestorConfigurationException( + f"Invalid age: {self.age}. Must be one of {self.VALID_AGE}" + ) + + req_url = ( + f"{self.url}/api/feeds/{self.feed_type}/{self.attack_type}/{self.age}.json" + ) + result = requests.get(req_url) + result.raise_for_status() + content = result.json() + if not isinstance(content.get("iocs"), list): + raise IngestorRunException(f"Content {content} not expected") + + limit = min(len(content["iocs"]), self.limit) + for elem in content["iocs"][:limit]: + value = elem.get("value") + try: + ipaddress.ip_address(value) + yield value + except ValueError: + pass + + @classmethod + def _monkeypatch(cls): + patches = [ + if_mock_connections( + patch( + "requests.get", + return_value=MockUpResponse( + { + "license": "https://github.com/honeynet/GreedyBear/blob/main/FEEDS_LICENSE.md", + "iocs": [ + { + "feed_type": "suricata", + "value": "91.205.219.185", + "scanner": True, + "payload_request": False, + "first_seen": "2024-05-29", + "last_seen": "2025-02-01", + "times_seen": 6437, + }, + { + "feed_type": "suricata", + "value": "88.210.32.15", + "scanner": True, + "payload_request": False, + "first_seen": "2024-07-30", + "last_seen": "2025-02-01", + "times_seen": 61, + }, + ], + }, + 200, + ), + ), + ) + ] + return super()._monkeypatch(patches=patches) diff --git a/api_app/ingestors_manager/ingestors/malware_bazaar.py b/api_app/ingestors_manager/ingestors/malware_bazaar.py index ddd6364b79..951586f22c 100644 --- a/api_app/ingestors_manager/ingestors/malware_bazaar.py +++ b/api_app/ingestors_manager/ingestors/malware_bazaar.py @@ -10,18 +10,21 @@ from api_app.ingestors_manager.classes import Ingestor from api_app.ingestors_manager.exceptions import IngestorRunException +from 
api_app.mixins import AbuseCHMixin from tests.mock_utils import MockUpResponse, if_mock_connections logger = logging.getLogger(__name__) -class MalwareBazaar(Ingestor): +class MalwareBazaar(AbuseCHMixin, Ingestor): # API endpoint url: str # Download samples that are up to X hours old hours: int # Download samples from chosen signatures (aka malware families) signatures: str + # Max number of results you want to display + limit: int @classmethod def update(cls) -> bool: @@ -31,7 +34,8 @@ def update(cls) -> bool: def get_signature_information(self, signature): result = requests.post( self.url, - data={"query": "get_siginfo", "signature": signature, "limit": 100}, + data={"query": "get_siginfo", "signature": signature, "limit": self.limit}, + headers=self.authentication_header, timeout=30, ) result.raise_for_status() @@ -51,6 +55,12 @@ def get_recent_samples(self): current_time = timezone.now() for signature in self.signatures: data = self.get_signature_information(signature) + hours_str = "hour" if self.hours == 1 else "hours" + if len(data) > self.limit: + logger.info( + f"{signature}: in the last {hours_str} there are " + f"more results than the limit {len(data)}/{self.limit}" + ) for elem in data: first_seen = timezone.make_aware( timezone.datetime.strptime(elem["first_seen"], "%Y-%m-%d %H:%M:%S") @@ -58,13 +68,6 @@ def get_recent_samples(self): diff = int((current_time - first_seen).total_seconds()) // 3600 if elem["signature"] == signature and diff <= self.hours: hashes.add(elem["sha256_hash"]) - - last_hours_str = ( - "Last hour" if self.hours == 1 else f"Last {self.hours} hours" - ) - logger.info( - f"{last_hours_str} {signature} samples: {len(hashes)}/{len(data)}" - ) return hashes def download_sample(self, h): @@ -75,6 +78,7 @@ def download_sample(self, h): "query": "get_file", "sha256_hash": h, }, + headers=self.authentication_header, timeout=60, ) sample_archive.raise_for_status() diff --git a/api_app/ingestors_manager/ingestors/threatfox.py 
b/api_app/ingestors_manager/ingestors/threatfox.py index 3c2f72d1e7..c27cc44cfe 100644 --- a/api_app/ingestors_manager/ingestors/threatfox.py +++ b/api_app/ingestors_manager/ingestors/threatfox.py @@ -6,12 +6,13 @@ from api_app.ingestors_manager.classes import Ingestor from api_app.ingestors_manager.exceptions import IngestorRunException +from api_app.mixins import AbuseCHMixin from tests.mock_utils import MockUpResponse, if_mock_connections logger = logging.getLogger(__name__) -class ThreatFox(Ingestor): +class ThreatFox(AbuseCHMixin, Ingestor): # API endpoint url = "https://threatfox-api.abuse.ch/api/v1/" # Days to check. From 1 to 7 @@ -22,7 +23,11 @@ def update(cls) -> bool: pass def run(self) -> Iterable[Any]: - result = requests.post(self.url, json={"query": "get_iocs", "days": self.days}) + result = requests.post( + self.url, + json={"query": "get_iocs", "days": self.days}, + headers=self.authentication_header, + ) result.raise_for_status() content = result.json() logger.info(f"ThreatFox data is {content}") diff --git a/api_app/ingestors_manager/migrations/0026_alter_ingestor_config_malware_bazaar_threatfox.py b/api_app/ingestors_manager/migrations/0026_alter_ingestor_config_malware_bazaar_threatfox.py new file mode 100644 index 0000000000..46247909a2 --- /dev/null +++ b/api_app/ingestors_manager/migrations/0026_alter_ingestor_config_malware_bazaar_threatfox.py @@ -0,0 +1,57 @@ +from django.db import migrations + + +def migrate(apps, schema_editor): + Parameter = apps.get_model("api_app", "Parameter") + PythonModule = apps.get_model("api_app", "PythonModule") + + ingestors = [ + "malware_bazaar.MalwareBazaar", + "threatfox.ThreatFox", + ] + for ingestor in ingestors: + module = PythonModule.objects.get( + module=ingestor, + base_path="api_app.ingestors_manager.ingestors", + ) + Parameter.objects.create( + name="service_api_key", + type="str", + description="Optional API key to connect to abuse.ch services.", + is_secret=True, + required=False, + 
python_module=module, + ) + + +def reverse_migrate(apps, schema_editor): + Parameter = apps.get_model("api_app", "Parameter") + PythonModule = apps.get_model("api_app", "PythonModule") + + ingestors = [ + "malware_bazaar.MalwareBazaar", + "threatfox.ThreatFox", + ] + for ingestor in ingestors: + module = PythonModule.objects.get( + module=ingestor, + base_path="api_app.ingestors_manager.ingestors", + ) + Parameter.objects.get( + name="service_api_key", + type="str", + description="Optional API key to connect to abuse.ch services.", + is_secret=True, + required=False, + python_module=module, + ).delete() + + +class Migration(migrations.Migration): + atomic = False + dependencies = [ + ("api_app", "0065_job_mpnodesearch"), + ("ingestors_manager", "0025_ingestor_config_virustotal_example_query"), + ] + + operations = [migrations.RunPython(migrate, reverse_migrate)] diff --git a/api_app/ingestors_manager/migrations/0027_added_limit_parameter_malware_bazaar_threatfox.py b/api_app/ingestors_manager/migrations/0027_added_limit_parameter_malware_bazaar_threatfox.py new file mode 100644 index 0000000000..23273a09a4 --- /dev/null +++ b/api_app/ingestors_manager/migrations/0027_added_limit_parameter_malware_bazaar_threatfox.py @@ -0,0 +1,80 @@ +from django.db import migrations + + +def migrate(apps, schema_editor): + Parameter = apps.get_model("api_app", "Parameter") + PythonModule = apps.get_model("api_app", "PythonModule") + IngestorConfig = apps.get_model("ingestors_manager", "IngestorConfig") + PluginConfig = apps.get_model("api_app", "PluginConfig") + + ingestors = [ + "malware_bazaar.MalwareBazaar", + "threatfox.ThreatFox", + ] + for ingestor in ingestors: + module = PythonModule.objects.get( + module=ingestor, + base_path="api_app.ingestors_manager.ingestors", + ) + p = Parameter.objects.create( + name="limit", + type="int", + description="Max number of results.", + is_secret=False, + required=True, + python_module=module, + ) + p.full_clean() + p.save() + + ic = 
IngestorConfig.objects.get(name=ingestor.split(".")[1]) + pc = PluginConfig( + value=20, + ingestor_config=ic, + for_organization=False, + owner=None, + parameter=p, + ) + pc.full_clean() + pc.save() + + +def reverse_migrate(apps, schema_editor): + Parameter = apps.get_model("api_app", "Parameter") + PythonModule = apps.get_model("api_app", "PythonModule") + IngestorConfig = apps.get_model("ingestors_manager", "IngestorConfig") + PluginConfig = apps.get_model("api_app", "PluginConfig") + + ingestors = [ + "malware_bazaar.MalwareBazaar", + "threatfox.ThreatFox", + ] + for ingestor in ingestors: + module = PythonModule.objects.get( + module=ingestor, + base_path="api_app.ingestors_manager.ingestors", + ) + ic = IngestorConfig.objects.get(name=ingestor.split(".")[1]) + p = Parameter.objects.get( + name="limit", + type="int", + description="Max number of results.", + is_secret=False, + required=True, + python_module=module, + ) + PluginConfig.objects.get( + parameter=p, + ingestor_config=ic, + ).delete() + p.delete() + + +class Migration(migrations.Migration): + atomic = False + dependencies = [ + ("api_app", "0065_job_mpnodesearch"), + ("ingestors_manager", "0026_alter_ingestor_config_malware_bazaar_threatfox"), + ] + + operations = [migrations.RunPython(migrate, reverse_migrate)] diff --git a/api_app/ingestors_manager/migrations/0028_ingestor_config_greedybear.py b/api_app/ingestors_manager/migrations/0028_ingestor_config_greedybear.py new file mode 100644 index 0000000000..4d29004698 --- /dev/null +++ b/api_app/ingestors_manager/migrations/0028_ingestor_config_greedybear.py @@ -0,0 +1,332 @@ +from django.db import migrations +from django.db.models.fields.related_descriptors import ( + ForwardManyToOneDescriptor, + ForwardOneToOneDescriptor, + ManyToManyDescriptor, + ReverseManyToOneDescriptor, + ReverseOneToOneDescriptor, +) + +plugin = { + "python_module": { + "health_check_schedule": None, + "update_schedule": { + "minute": "0", + "hour": "0", + "day_of_week": 
"*", + "day_of_month": "*", + "month_of_year": "*", + }, + "module": "greedybear.GreedyBear", + "base_path": "api_app.ingestors_manager.ingestors", + }, + "schedule": { + "minute": "0", + "hour": "0", + "day_of_week": "*", + "day_of_month": "*", + "month_of_year": "*", + }, + "periodic_task": { + "crontab": { + "minute": "0", + "hour": "0", + "day_of_week": "*", + "day_of_month": "*", + "month_of_year": "*", + }, + "name": "GreedyBearIngestor", + "task": "intel_owl.tasks.execute_ingestor", + "kwargs": '{"config_name": "GreedyBear"}', + "queue": "default", + "enabled": False, + }, + "user": { + "username": "GreedyBearIngestor", + "profile": { + "user": { + "username": "GreedyBearIngestor", + "email": "", + "first_name": "", + "last_name": "", + "password": "", + "is_active": True, + }, + "company_name": "", + "company_role": "", + "twitter_handle": "", + "discover_from": "other", + "task_priority": 7, + "is_robot": True, + }, + }, + "playbooks_choice": ["Popular_IP_Reputation_Services"], + "name": "GreedyBear", + "description": "Queries feeds which are generated by the [GreedyBear Project](https://intelowlproject.github.io/docs/GreedyBear/Introduction/).", + "disabled": True, + "soft_time_limit": 60, + "routing_key": "ingestor", + "health_check_status": True, + "maximum_jobs": 50, + "delay": "00:00:00", + "model": "ingestors_manager.IngestorConfig", +} + +params = [ + { + "python_module": { + "module": "greedybear.GreedyBear", + "base_path": "api_app.ingestors_manager.ingestors", + }, + "name": "url", + "type": "str", + "description": "API endpoint", + "is_secret": False, + "required": False, + }, + { + "python_module": { + "module": "greedybear.GreedyBear", + "base_path": "api_app.ingestors_manager.ingestors", + }, + "name": "limit", + "type": "int", + "description": "Max number of results.", + "is_secret": False, + "required": False, + }, + { + "python_module": { + "module": "greedybear.GreedyBear", + "base_path": "api_app.ingestors_manager.ingestors", + }, + 
"name": "feed_type", + "type": "str", + "description": "The available feed types are log4j, cowrie, and all.", + "is_secret": False, + "required": False, + }, + { + "python_module": { + "module": "greedybear.GreedyBear", + "base_path": "api_app.ingestors_manager.ingestors", + }, + "name": "attack_type", + "type": "str", + "description": "The available attack_type are scanner, payload_request, and all.", + "is_secret": False, + "required": False, + }, + { + "python_module": { + "module": "greedybear.GreedyBear", + "base_path": "api_app.ingestors_manager.ingestors", + }, + "name": "age", + "type": "str", + "description": "The available age are recent and persistent.", + "is_secret": False, + "required": False, + }, +] + +values = [ + { + "parameter": { + "python_module": { + "module": "greedybear.GreedyBear", + "base_path": "api_app.ingestors_manager.ingestors", + }, + "name": "url", + "type": "str", + "description": "API endpoint", + "is_secret": False, + "required": False, + }, + "analyzer_config": None, + "connector_config": None, + "visualizer_config": None, + "ingestor_config": "GreedyBear", + "pivot_config": None, + "for_organization": False, + "value": "https://greedybear.honeynet.org", + "updated_at": "2025-02-10T12:56:17.294680Z", + "owner": None, + }, + { + "parameter": { + "python_module": { + "module": "greedybear.GreedyBear", + "base_path": "api_app.ingestors_manager.ingestors", + }, + "name": "limit", + "type": "int", + "description": "Max number of results.", + "is_secret": False, + "required": False, + }, + "analyzer_config": None, + "connector_config": None, + "visualizer_config": None, + "ingestor_config": "GreedyBear", + "pivot_config": None, + "for_organization": False, + "value": 50, + "updated_at": "2025-02-10T12:56:17.302177Z", + "owner": None, + }, + { + "parameter": { + "python_module": { + "module": "greedybear.GreedyBear", + "base_path": "api_app.ingestors_manager.ingestors", + }, + "name": "feed_type", + "type": "str", + "description": 
"The available feed types are log4j, cowrie, and all.", + "is_secret": False, + "required": False, + }, + "analyzer_config": None, + "connector_config": None, + "visualizer_config": None, + "ingestor_config": "GreedyBear", + "pivot_config": None, + "for_organization": False, + "value": "all", + "updated_at": "2025-02-10T12:56:17.309549Z", + "owner": None, + }, + { + "parameter": { + "python_module": { + "module": "greedybear.GreedyBear", + "base_path": "api_app.ingestors_manager.ingestors", + }, + "name": "attack_type", + "type": "str", + "description": "The available attack_type are scanner, payload_request, and all.", + "is_secret": False, + "required": False, + }, + "analyzer_config": None, + "connector_config": None, + "visualizer_config": None, + "ingestor_config": "GreedyBear", + "pivot_config": None, + "for_organization": False, + "value": "all", + "updated_at": "2025-02-10T12:56:17.316766Z", + "owner": None, + }, + { + "parameter": { + "python_module": { + "module": "greedybear.GreedyBear", + "base_path": "api_app.ingestors_manager.ingestors", + }, + "name": "age", + "type": "str", + "description": "The available age are recent and persistent.", + "is_secret": False, + "required": False, + }, + "analyzer_config": None, + "connector_config": None, + "visualizer_config": None, + "ingestor_config": "GreedyBear", + "pivot_config": None, + "for_organization": False, + "value": "recent", + "updated_at": "2025-02-10T12:56:17.324439Z", + "owner": None, + }, +] + + +def _get_real_obj(Model, field, value): + def _get_obj(Model, other_model, value): + if isinstance(value, dict): + real_vals = {} + for key, real_val in value.items(): + real_vals[key] = _get_real_obj(other_model, key, real_val) + value = other_model.objects.get_or_create(**real_vals)[0] + # it is just the primary key serialized + else: + if isinstance(value, int): + if Model.__name__ == "PluginConfig": + value = other_model.objects.get(name=plugin["name"]) + else: + value = 
other_model.objects.get(pk=value) + else: + value = other_model.objects.get(name=value) + return value + + if ( + type(getattr(Model, field)) + in [ + ForwardManyToOneDescriptor, + ReverseManyToOneDescriptor, + ReverseOneToOneDescriptor, + ForwardOneToOneDescriptor, + ] + and value + ): + other_model = getattr(Model, field).get_queryset().model + value = _get_obj(Model, other_model, value) + elif type(getattr(Model, field)) in [ManyToManyDescriptor] and value: + other_model = getattr(Model, field).rel.model + value = [_get_obj(Model, other_model, val) for val in value] + return value + + +def _create_object(Model, data): + mtm, no_mtm = {}, {} + for field, value in data.items(): + value = _get_real_obj(Model, field, value) + if type(getattr(Model, field)) is ManyToManyDescriptor: + mtm[field] = value + else: + no_mtm[field] = value + try: + o = Model.objects.get(**no_mtm) + except Model.DoesNotExist: + o = Model(**no_mtm) + o.full_clean() + o.save() + for field, value in mtm.items(): + attribute = getattr(o, field) + if value is not None: + attribute.set(value) + return False + return True + + +def migrate(apps, schema_editor): + Parameter = apps.get_model("api_app", "Parameter") + PluginConfig = apps.get_model("api_app", "PluginConfig") + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + if not Model.objects.filter(name=plugin["name"]).exists(): + exists = _create_object(Model, plugin) + if not exists: + for param in params: + _create_object(Parameter, param) + for value in values: + _create_object(PluginConfig, value) + + +def reverse_migrate(apps, schema_editor): + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + Model.objects.get(name=plugin["name"]).delete() + + +class Migration(migrations.Migration): + atomic = False + dependencies = [ + ("api_app", "0065_job_mpnodesearch"), + ("ingestors_manager", "0027_added_limit_parameter_malware_bazaar_threatfox"), + ] + + operations = 
[migrations.RunPython(migrate, reverse_migrate)] diff --git a/api_app/mixins.py b/api_app/mixins.py index fabe9e2092..96eb96f9cf 100644 --- a/api_app/mixins.py +++ b/api_app/mixins.py @@ -671,3 +671,16 @@ def _vt_get_report( result["link"] = f"https://www.virustotal.com/gui/{uri_prefix}/{uri_postfix}" return result + + +class AbuseCHMixin: + # API key to access abuse.ch services + _service_api_key: str + + @property + def authentication_header(self) -> dict: + if hasattr(self, "_service_api_key") and self._service_api_key: + logger.debug("Found auth key for abuse.ch request") + return {"Auth-Key": self._service_api_key} + + return {} diff --git a/api_app/pivots_manager/queryset.py b/api_app/pivots_manager/queryset.py index 9ccf9ad1b9..b92d1967d4 100644 --- a/api_app/pivots_manager/queryset.py +++ b/api_app/pivots_manager/queryset.py @@ -20,7 +20,6 @@ def valid( analyzers.values_list("pk", flat=True) ) ) - | Q(related_analyzer_configs=None) ) if connectors.exists(): qs = qs.many_to_many_to_array("related_connector_configs").filter( @@ -29,7 +28,6 @@ def valid( connectors.values_list("pk", flat=True) ) ) - | Q(related_connector_configs=None) ) return qs.distinct() diff --git a/api_app/playbooks_manager/migrations/0058_add_ultradns_to_free_to_use_and_dns.py b/api_app/playbooks_manager/migrations/0058_add_ultradns_to_free_to_use_and_dns.py new file mode 100644 index 0000000000..495c80cffd --- /dev/null +++ b/api_app/playbooks_manager/migrations/0058_add_ultradns_to_free_to_use_and_dns.py @@ -0,0 +1,55 @@ +# This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl +# See the file 'LICENSE' for copying permission. 
+ + +from django.db import migrations + + +def migrate(apps, schema_editor): + playbook_config = apps.get_model("playbooks_manager", "PlaybookConfig") + AnalyzerConfig = apps.get_model("analyzers_manager", "AnalyzerConfig") + pc = playbook_config.objects.get(name="FREE_TO_USE_ANALYZERS") + pc2 = playbook_config.objects.get(name="Dns") + pc.analyzers.add( + AnalyzerConfig.objects.get(name="UltraDNS_DNS").id, + AnalyzerConfig.objects.get(name="UltraDNS_Malicious_Detector").id, + ) + pc2.analyzers.add( + AnalyzerConfig.objects.get(name="UltraDNS_DNS").id, + AnalyzerConfig.objects.get(name="UltraDNS_Malicious_Detector").id, + ) + pc.full_clean() + pc.save() + pc2.full_clean() + pc2.save() + + +def reverse_migrate(apps, schema_editor): + playbook_config = apps.get_model("playbooks_manager", "PlaybookConfig") + AnalyzerConfig = apps.get_model("analyzers_manager", "AnalyzerConfig") + pc = playbook_config.objects.get(name="FREE_TO_USE_ANALYZERS") + pc2 = playbook_config.objects.get(name="Dns") + + pc.analyzers.remove( + AnalyzerConfig.objects.get(name="UltraDNS_DNS").id, + AnalyzerConfig.objects.get(name="UltraDNS_Malicious_Detector").id, + ) + pc.full_clean() + pc.save() + pc2.analyzers.remove( + AnalyzerConfig.objects.get(name="UltraDNS_DNS").id, + AnalyzerConfig.objects.get(name="UltraDNS_Malicious_Detector").id, + ) + pc2.full_clean() + pc2.save() + + +class Migration(migrations.Migration): + dependencies = [ + ("playbooks_manager", "0057_alter_phishing_extractor_add_domain"), + ("analyzers_manager", "0145_analyzer_config_ultradns_malicious_detector"), + ] + + operations = [ + migrations.RunPython(migrate, reverse_migrate), + ] diff --git a/api_app/playbooks_manager/queryset.py b/api_app/playbooks_manager/queryset.py index 35305c9cae..58b854e86f 100644 --- a/api_app/playbooks_manager/queryset.py +++ b/api_app/playbooks_manager/queryset.py @@ -30,6 +30,7 @@ def _subquery_weight_org(user: User) -> Union[Subquery, Value]: Job.objects.prefetch_related("user") .filter( 
user__membership__organization__pk=user.membership.organization.pk, + user__profile__is_robot=False, playbook_to_execute=OuterRef("pk"), finished_analysis_time__gte=now() - datetime.timedelta(days=30), ) @@ -46,16 +47,17 @@ def _subquery_weight_other(user: User) -> Subquery: Job.objects.filter( playbook_to_execute=OuterRef("pk"), finished_analysis_time__gte=now() - datetime.timedelta(days=30), + user__profile__is_robot=False, ) .exclude( - user__membership__organization__pk=user.membership.organization.pk + user__membership__organization__pk=user.membership.organization.pk, ) .annotate(count=Func(F("pk"), function="Count")) .values("count") ) return Subquery( Job.objects.prefetch_related("user") - .filter(playbook_to_execute=OuterRef("pk")) + .filter(playbook_to_execute=OuterRef("pk"), user__profile__is_robot=False) .exclude(user__pk=user.pk) .annotate(count=Func(F("pk"), function="Count")) .values("count") diff --git a/api_app/serializers/__init__.py b/api_app/serializers/__init__.py index 06042c238c..2b55a43256 100644 --- a/api_app/serializers/__init__.py +++ b/api_app/serializers/__init__.py @@ -6,6 +6,7 @@ from api_app.interfaces import OwnershipAbstractModel from certego_saas.apps.organization.organization import Organization from certego_saas.ext.upload.elastic import BISerializer +from intel_owl.settings._util import get_environment class AbstractBIInterface(BISerializer): @@ -36,12 +37,9 @@ def get_class_instance(instance): @staticmethod def get_environment(instance): - if settings.STAGE_PRODUCTION: - return "prod" - elif settings.STAGE_STAGING: - return "stag" - else: - return "test" + # we cannot pass directly the function to the serializer's field + # for this reason we need a function that call another function + return get_environment() @staticmethod def get_index(): diff --git a/api_app/serializers/job.py b/api_app/serializers/job.py index f6fe283441..9d9a34db3d 100644 --- a/api_app/serializers/job.py +++ b/api_app/serializers/job.py @@ -124,13 
+124,13 @@ class Meta: slug_field="name", queryset=ConnectorConfig.objects.all(), many=True, - default=ConnectorConfig.objects.none(), + default=[], ) analyzers_requested = rfs.SlugRelatedField( slug_field="name", queryset=AnalyzerConfig.objects.all(), many=True, - default=AnalyzerConfig.objects.none(), + default=[], ) playbook_requested = rfs.SlugRelatedField( slug_field="name", @@ -927,6 +927,7 @@ def set_analyzers_to_execute( observable_classification: str, **kwargs, ) -> List[AnalyzerConfig]: + logger.debug(f"{analyzers_requested=} {type(analyzers_requested)=}") analyzers_to_execute = analyzers_requested.copy() partially_filtered_analyzers_qs = AnalyzerConfig.objects.filter( diff --git a/api_app/views.py b/api_app/views.py index e54a687b80..473a26d77c 100644 --- a/api_app/views.py +++ b/api_app/views.py @@ -39,6 +39,7 @@ from certego_saas.ext.viewsets import ReadAndDeleteOnlyViewSet from intel_owl import tasks from intel_owl.celery import app as celery_app +from intel_owl.settings._util import get_environment from .analyzers_manager.constants import ObservableTypes from .choices import ObservableClassification @@ -324,10 +325,12 @@ def analyze_multiple_observables(request): - 200: JSON response with the job details for each initiated analysis. 
""" logger.info(f"received analyze_multiple_observables from user {request.user}") + logger.debug(f"{request.data=}") oas = ObservableAnalysisSerializer( data=request.data, many=True, context={"request": request} ) oas.is_valid(raise_exception=True) + logger.debug(f"{oas.validated_data=}") parent_job = oas.validated_data[0].get("parent_job", None) jobs = oas.save(send_task=True, parent=parent_job) jrs = JobResponseSerializer(jobs, many=True).data @@ -936,7 +939,9 @@ def __aggregation_response_dynamic( if len(most_frequent_values): annotations = { - val: Count(field_name, filter=Q(**{field_name: val})) + val.replace(" ", "") + .replace("?", "") + .replace(";", ""): Count(field_name, filter=Q(**{field_name: val})) for val in most_frequent_values } logger.debug(f"request: {field_name} annotations: {annotations}") @@ -1792,7 +1797,7 @@ def get(self, request): # 3 return data elastic_response = ( - Search(index="plugin-report-*") + Search(index=f"plugin-report-{get_environment()}*") .query(QElastic("bool", filter=filter_list)) .extra(size=10000) # max allowed size .execute() diff --git a/api_app/visualizers_manager/classes.py b/api_app/visualizers_manager/classes.py index 8931ad5f3b..c800c48afa 100644 --- a/api_app/visualizers_manager/classes.py +++ b/api_app/visualizers_manager/classes.py @@ -548,3 +548,10 @@ def pivots_reports(self) -> QuerySet: from api_app.pivots_manager.models import PivotReport return PivotReport.objects.filter(job=self._job) + + def data_models(self) -> QuerySet: + from api_app.analyzers_manager.models import AnalyzerReport + + data_model_class = AnalyzerReport.get_data_model_class(self._job) + analyzer_reports_pk = [report.pk for report in self.analyzer_reports()] + return data_model_class.objects.filter(analyzers_report__in=analyzer_reports_pk) diff --git a/api_app/visualizers_manager/migrations/0040_visualizer_config_data_model.py b/api_app/visualizers_manager/migrations/0040_visualizer_config_data_model.py new file mode 100644 index 
0000000000..b8a487bceb --- /dev/null +++ b/api_app/visualizers_manager/migrations/0040_visualizer_config_data_model.py @@ -0,0 +1,117 @@ +from django.db import migrations +from django.db.models.fields.related_descriptors import ( + ForwardManyToOneDescriptor, + ForwardOneToOneDescriptor, + ManyToManyDescriptor, + ReverseManyToOneDescriptor, + ReverseOneToOneDescriptor, +) + +plugin = { + "python_module": { + "health_check_schedule": None, + "update_schedule": None, + "module": "data_model.DataModel", + "base_path": "api_app.visualizers_manager.visualizers", + }, + "playbooks": ["FREE_TO_USE_ANALYZERS"], + "name": "Data_Model", + "description": "Visualizer for Data Models", + "disabled": False, + "soft_time_limit": 60, + "routing_key": "default", + "health_check_status": True, + "model": "visualizers_manager.VisualizerConfig", +} + +params = [] + +values = [] + + +def _get_real_obj(Model, field, value): + def _get_obj(Model, other_model, value): + if isinstance(value, dict): + real_vals = {} + for key, real_val in value.items(): + real_vals[key] = _get_real_obj(other_model, key, real_val) + value = other_model.objects.get_or_create(**real_vals)[0] + # it is just the primary key serialized + else: + if isinstance(value, int): + if Model.__name__ == "PluginConfig": + value = other_model.objects.get(name=plugin["name"]) + else: + value = other_model.objects.get(pk=value) + else: + value = other_model.objects.get(name=value) + return value + + if ( + type(getattr(Model, field)) + in [ + ForwardManyToOneDescriptor, + ReverseManyToOneDescriptor, + ReverseOneToOneDescriptor, + ForwardOneToOneDescriptor, + ] + and value + ): + other_model = getattr(Model, field).get_queryset().model + value = _get_obj(Model, other_model, value) + elif type(getattr(Model, field)) in [ManyToManyDescriptor] and value: + other_model = getattr(Model, field).rel.model + value = [_get_obj(Model, other_model, val) for val in value] + return value + + +def _create_object(Model, data): + mtm, no_mtm 
= {}, {} + for field, value in data.items(): + value = _get_real_obj(Model, field, value) + if type(getattr(Model, field)) is ManyToManyDescriptor: + mtm[field] = value + else: + no_mtm[field] = value + try: + o = Model.objects.get(**no_mtm) + except Model.DoesNotExist: + o = Model(**no_mtm) + o.full_clean() + o.save() + for field, value in mtm.items(): + attribute = getattr(o, field) + if value is not None: + attribute.set(value) + return False + return True + + +def migrate(apps, schema_editor): + Parameter = apps.get_model("api_app", "Parameter") + PluginConfig = apps.get_model("api_app", "PluginConfig") + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + if not Model.objects.filter(name=plugin["name"]).exists(): + exists = _create_object(Model, plugin) + if not exists: + for param in params: + _create_object(Parameter, param) + for value in values: + _create_object(PluginConfig, value) + + +def reverse_migrate(apps, schema_editor): + python_path = plugin.pop("model") + Model = apps.get_model(*python_path.split(".")) + Model.objects.get(name=plugin["name"]).delete() + + +class Migration(migrations.Migration): + atomic = False + dependencies = [ + ("api_app", "0064_vt_sample_download"), + ("visualizers_manager", "0039_sample_download"), + ] + + operations = [migrations.RunPython(migrate, reverse_migrate)] diff --git a/api_app/visualizers_manager/visualizers/data_model.py b/api_app/visualizers_manager/visualizers/data_model.py new file mode 100644 index 0000000000..ced5cc1ac5 --- /dev/null +++ b/api_app/visualizers_manager/visualizers/data_model.py @@ -0,0 +1,425 @@ +from logging import getLogger +from typing import Dict, List + +from api_app.analyzers_manager.models import AnalyzerReport +from api_app.data_model_manager.enums import DataModelEvaluations +from api_app.data_model_manager.models import ( + DomainDataModel, + FileDataModel, + IPDataModel, +) +from api_app.visualizers_manager.classes import Visualizer +from 
api_app.visualizers_manager.decorators import ( + visualizable_error_handler_with_params, +) +from api_app.visualizers_manager.enums import VisualizableTableColumnSize + +logger = getLogger(__name__) + + +class DataModel(Visualizer): + @classmethod + def update(cls) -> bool: + pass + + @visualizable_error_handler_with_params("get_eval_list") + def get_eval_list(self, evaluation, color, icon, data_models): + disable_element = not bool(data_models) + return self.VList( + name=self.Base( + value=evaluation, + color=color if not disable_element else Visualizer.Color.TRANSPARENT, + icon=icon, + disable=False, + ), + value=[ + self.Base( + value=data_model.analyzers_report.all().first().config.name, + disable=False, + ) + for data_model in data_models + ], + size=self.Size.S_2, + disable=disable_element, + start_open=True, + ) + + @visualizable_error_handler_with_params("get_base_data_list") + def get_base_data_list(self, name, values_list): + disable_element = not bool(values_list) + return self.VList( + name=self.Base(value=name, disable=False), + value=values_list, + disable=disable_element, + start_open=True, + ) + + @visualizable_error_handler_with_params("get_field") + def get_field(self, field, data_models): + for data_model in data_models: + value = getattr(data_model, field, None) + if value: + return Visualizer.Title( + title=Visualizer.Base(value=field.replace("_", " "), disable=False), + value=Visualizer.Base( + value=value, + disable=False, + ), + disable=False, + ) + + return Visualizer.Title( + title=Visualizer.Base(value=field.replace("_", " "), disable=True), + value=Visualizer.Base( + value="", + disable=True, + ), + disable=True, + ) + + @visualizable_error_handler_with_params("get_resolutions") + def get_resolutions(self, data_models): + resolutions = [] + for data_model in data_models: + if data_model.resolutions: + resolutions.append( + self.VList( + name=self.Base( + value=data_model.analyzers_report.all().first().config.name, + disable=False, + ), 
+ value=[ + self.Base( + value=resolution, + disable=False, + ) + for resolution in data_model.resolutions + ], + size=self.Size.S_2, + disable=False, + start_open=True, + ) + ) + return resolutions + + @visualizable_error_handler_with_params("get_pdns") + def get_pdns(self, data_models): + columns = [ + self.TableColumn( + name="rrname", max_width=VisualizableTableColumnSize.S_300 + ), + self.TableColumn(name="rrtype", max_width=VisualizableTableColumnSize.S_50), + self.TableColumn(name="rdata", max_width=VisualizableTableColumnSize.S_300), + self.TableColumn( + name="time_first", max_width=VisualizableTableColumnSize.S_100 + ), + self.TableColumn( + name="time_last", max_width=VisualizableTableColumnSize.S_100 + ), + self.TableColumn( + name="analyzer", max_width=VisualizableTableColumnSize.S_200 + ), + ] + + data = [] + for data_model in data_models: + ietf_reports = data_model.ietf_report.all() + for report in ietf_reports: + data.append( + { + "rrname": self.Base( + value=report.rrname, + color=self.Color.TRANSPARENT, + disable=False, + ), + "rrtype": self.Base( + value=report.rrtype, + color=self.Color.TRANSPARENT, + disable=False, + ), + "rdata": self.VList( + value=[ + self.Base( + value=rdata, + color=self.Color.TRANSPARENT, + disable=False, + ) + for rdata in report.rdata + ], + disable=False, + ), + "time_first": self.Base( + value=report.time_first.strftime("%Y-%m-%d %H:%M:%S"), + color=self.Color.TRANSPARENT, + disable=False, + ), + "time_last": self.Base( + value=report.time_last.strftime("%Y-%m-%d %H:%M:%S"), + color=self.Color.TRANSPARENT, + disable=False, + ), + "analyzer": self.Base( + value=data_model.analyzers_report.all().first().config.name, + color=self.Color.TRANSPARENT, + disable=False, + ), + } + ) + + return self.Table( + columns=columns, + data=data, + size=Visualizer.Size.S_ALL, + page_size=10, + sort_by_id="last_view", + sort_by_desc=True, + ) + + @visualizable_error_handler_with_params("get_signatures") + def get_signatures(self, 
data_models): + columns = [ + self.TableColumn( + name="provider", max_width=VisualizableTableColumnSize.S_100 + ), + self.TableColumn(name="url", max_width=VisualizableTableColumnSize.S_300), + self.TableColumn(name="score", max_width=VisualizableTableColumnSize.S_50), + self.TableColumn( + name="analyzer", max_width=VisualizableTableColumnSize.S_100 + ), + ] + + data = [] + for data_model in data_models: + signatures = data_model.signatures.all() + for signature in signatures: + data.append( + { + "provider": self.Base( + value=signature.provider, + color=self.Color.TRANSPARENT, + disable=False, + ), + "url": self.Base( + value=( + signature.url if signature.url else "No url available" + ), + link=signature.url, + color=self.Color.TRANSPARENT, + disable=not signature.url, + ), + "score": self.Base( + value=signature.score, + color=self.Color.TRANSPARENT, + disable=False, + ), + "analyzer": self.Base( + value=data_model.analyzers_report.all().first().config.name, + color=self.Color.TRANSPARENT, + disable=False, + ), + } + ) + + return self.Table( + columns=columns, + data=data, + size=Visualizer.Size.S_ALL, + page_size=10, + sort_by_id="provider", + ) + + def get_domain_data_elements(self, page, data_models): + page.add_level( + self.Level( + position=3, + size=self.LevelSize.S_4, + horizontal_list=self.HList(value=self.get_resolutions(data_models)), + ) + ) + + page.add_level( + self.Level( + position=4, + size=self.LevelSize.S_4, + horizontal_list=self.HList(value=[self.get_field("rank", data_models)]), + ) + ) + + page.add_level( + self.Level( + position=5, + size=self.LevelSize.S_5, + horizontal_list=self.HList(value=[self.get_pdns(data_models)]), + ) + ) + + def get_ip_data_elements(self, page, data_models): + page.add_level( + self.Level( + position=3, + size=self.LevelSize.S_4, + horizontal_list=self.HList(value=self.get_resolutions(data_models)), + ) + ) + + page.add_level( + self.Level( + position=4, + size=self.LevelSize.S_4, + 
horizontal_list=self.HList( + value=[ + self.get_field(field, data_models) + for field in [ + "asn", + "asn_rank", + "org_name", + "country_code", + "registered_country_code", + "isp", + ] + ] + ), + ) + ) + + page.add_level( + self.Level( + position=5, + size=self.LevelSize.S_5, + horizontal_list=self.HList(value=[self.get_pdns(data_models)]), + ) + ) + + def get_file_data_elements(self, page, data_models): + page.add_level( + self.Level( + position=3, + size=self.LevelSize.S_5, + horizontal_list=self.HList(value=[self.get_signatures(data_models)]), + ) + ) + + def run(self) -> List[Dict]: + trusted_data_models = [] + clean_data_models = [] + suspicious_data_models = [] + malicious_data_models = [] + noeval_data_models = [] + data_models = self.data_models() + + for data_model in data_models: + printable_analyzer_name = ( + data_model.analyzers_report.all().first().config.name.replace("_", " ") + ) + logger.debug(f"{printable_analyzer_name}, {data_model}") + + evaluation = "" + if data_model.evaluation: + evaluation = data_model.evaluation + + if evaluation == DataModelEvaluations.TRUSTED.value: + trusted_data_models.append(data_model) + elif evaluation == DataModelEvaluations.CLEAN.value: + clean_data_models.append(data_model) + elif evaluation == DataModelEvaluations.SUSPICIOUS.value: + suspicious_data_models.append(data_model) + elif evaluation == DataModelEvaluations.MALICIOUS.value: + malicious_data_models.append(data_model) + else: + noeval_data_models.append(data_model) + + evals_vlists = [] + for evaluation, color, icon, eval_data_models in [ + ( + "no evaluation", + Visualizer.Color.SECONDARY, + Visualizer.Icon.INFO, + noeval_data_models, + ), + ( + DataModelEvaluations.CLEAN.value, + Visualizer.Color.SUCCESS, + Visualizer.Icon.LIKE, + clean_data_models, + ), + ( + DataModelEvaluations.TRUSTED.value, + Visualizer.Color.SUCCESS, + Visualizer.Icon.LIKE, + trusted_data_models, + ), + ( + DataModelEvaluations.SUSPICIOUS.value, + Visualizer.Color.WARNING, + 
Visualizer.Icon.WARNING, + suspicious_data_models, + ), + ( + DataModelEvaluations.MALICIOUS.value, + Visualizer.Color.DANGER, + Visualizer.Icon.MALWARE, + malicious_data_models, + ), + ]: + evals_vlists.append( + self.get_eval_list(evaluation, color, icon, eval_data_models) + ) + + related_threats = [] + external_references = [] + malware_families = [] + tags = [] + + for data_model in data_models: + related_threats.extend(data_model.related_threats) + external_references.extend(data_model.external_references) + if data_model.malware_family: + malware_families.append(data_model.malware_family) + if data_model.tags: + tags.extend(data_model.tags) + + related_threats = list(set(related_threats)) + external_references = list(set(external_references)) + malware_families = list(set(malware_families)) + tags = list(set(tags)) + + base_data_vlists = [] + for name, values_list in [ + ("Tags", tags), + ("Related threats", related_threats), + ("Malware families", malware_families), + ("External references", external_references), + ]: + base_data_vlists.append(self.get_base_data_list(name, values_list)) + + page = self.Page(name="DataModel") + page.add_level( + self.Level( + position=1, + size=self.LevelSize.S_4, + horizontal_list=self.HList(value=evals_vlists), + ) + ) + + page.add_level( + self.Level( + position=2, + size=self.LevelSize.S_4, + horizontal_list=self.HList(value=base_data_vlists), + ) + ) + + data_model_class = AnalyzerReport.get_data_model_class(self._job) + if data_model_class == DomainDataModel: + self.get_domain_data_elements(page, data_models) + elif data_model_class == IPDataModel: + self.get_ip_data_elements(page, data_models) + elif data_model_class == FileDataModel: + self.get_file_data_elements(page, data_models) + + return [page.to_dict()] + + @classmethod + def _monkeypatch(cls): + patches = [] + return super()._monkeypatch(patches=patches) diff --git a/docker/.env b/docker/.env index 5117a613dc..2c83d70c03 100644 --- a/docker/.env +++ 
b/docker/.env @@ -1,6 +1,6 @@ ### DO NOT CHANGE THIS VALUE !! ### It should be updated only when you pull latest changes off from the 'master' branch of IntelOwl. # this variable must start with "REACT_APP_" to be used in the frontend too -REACT_APP_INTELOWL_VERSION=v6.2.1 +REACT_APP_INTELOWL_VERSION=v6.3.0 # if you want to use a nfs volume for shared files # NFS_ADDRESS= diff --git a/docker/Dockerfile b/docker/Dockerfile index e1f7191024..de5c5c5a5f 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -15,14 +15,15 @@ RUN npm install npm@latest --location=global \ # Stage 2: Backend FROM python:3.11.7 AS backend-build -ENV PYTHONUNBUFFERED 1 -ENV DJANGO_SETTINGS_MODULE intel_owl.settings -ENV PYTHONPATH /opt/deploy/intel_owl -ENV LOG_PATH /var/log/intel_owl +ENV PYTHONUNBUFFERED=1 +ENV DJANGO_SETTINGS_MODULE=intel_owl.settings +ENV PYTHONPATH=/opt/deploy/intel_owl +ENV LOG_PATH=/var/log/intel_owl ARG REPO_DOWNLOADER_ENABLED=true ARG WATCHMAN=false -ENV watch_logs_cmd "watch -n1 tail -n10 /var/log/intel_owl/django/api_app.log" -ARG PYCTI_VERSION=6.1.0 +ENV watch_logs_cmd="watch -n1 tail -n10 /var/log/intel_owl/django/api_app.log" +# This is required to allow compatibility with different OpenCTI instances +ARG PYCTI_VERSION=6.5.1 RUN mkdir -p ${LOG_PATH} \ ${LOG_PATH}/django \ @@ -34,18 +35,15 @@ RUN mkdir -p ${LOG_PATH} \ # python3-psycopg2 is required to use PostgresSQL with Django # apache2-utils is required to execute htpasswd # tshark is required for Hfinger file analyzer +# libemail-outlook-message-perl and libemail-address-perl are required for msgconvert RUN apt-get update \ && apt-get install -y --no-install-recommends apt-utils libsasl2-dev libssl-dev netcat-traditional \ - vim libldap2-dev libfuzzy-dev net-tools python3-psycopg2 git apache2-utils tshark \ - && apt-get clean \ + vim libldap2-dev libfuzzy-dev net-tools python3-psycopg2 git apache2-utils tshark \ + libemail-outlook-message-perl libemail-address-perl \ + && apt-get clean && apt-get 
autoclean && apt-get autoremove -y \ && rm -rf /var/lib/apt/lists/* \ && pip3 install --no-cache-dir --upgrade pip -# perl not interactive -ENV PERL_MM_USE_DEFAULT 1 -# msgconvert -RUN cpan -T Email::Outlook::Message - COPY requirements/project-requirements.txt $PYTHONPATH/project-requirements.txt COPY requirements/certego-requirements.txt $PYTHONPATH/certego-requirements.txt WORKDIR $PYTHONPATH @@ -77,5 +75,5 @@ COPY --from=frontend-build /build /var/www/reactapp # HOME_DIR = f"{Path.home()}/.quark-engine/" # Path(HOME_DIR).mkdir(parents=True, exist_ok=True) # so we have to set the home env variable to allow to create its directory -ENV HOME ${PYTHONPATH} +ENV HOME="${PYTHONPATH}" diff --git a/docker/Dockerfile_nginx b/docker/Dockerfile_nginx index 134f3da1fe..bb6b812bc8 100644 --- a/docker/Dockerfile_nginx +++ b/docker/Dockerfile_nginx @@ -1,6 +1,6 @@ -FROM library/nginx:1.27.0-alpine +FROM library/nginx:1.27.3-alpine -ENV NGINX_LOG_DIR /var/log/nginx +ENV NGINX_LOG_DIR=/var/log/nginx # this is to avoid having these logs redirected to stdout/stderr RUN rm $NGINX_LOG_DIR/access.log $NGINX_LOG_DIR/error.log && touch $NGINX_LOG_DIR/access.log $NGINX_LOG_DIR/error.log VOLUME /var/log/nginx diff --git a/docker/hooks/build b/docker/hooks/build new file mode 100644 index 0000000000..fa3372f7e4 --- /dev/null +++ b/docker/hooks/build @@ -0,0 +1,19 @@ +#!/bin/bash + +echo "display path" +echo "$(pwd)" +echo "display dockerfile path" +echo $DOCKERFILE_PATH +cd .. +echo "current branch" +echo "$SOURCE_BRANCH" + +version_regex='^v[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$' +if [[ "$SOURCE_BRANCH" == "master" || "$SOURCE_BRANCH" =~ $version_regex ]]; then + echo "The branch is master, proceeding with multi-arch build" + docker buildx create --name multiarch --use + docker buildx build -f "$DOCKERFILE_PATH" -t "$IMAGE_NAME" --platform linux/arm64,linux/amd64 --push . 
+else + echo "The branch is not master, proceeding with classic build" + docker buildx build -f "$DOCKERFILE_PATH" -t "$IMAGE_NAME" --push . +fi \ No newline at end of file diff --git a/docker/hooks/post_push b/docker/hooks/post_push new file mode 100644 index 0000000000..ebe7a8a930 --- /dev/null +++ b/docker/hooks/post_push @@ -0,0 +1,5 @@ +#!/bin/bash + +if [[ "$SOURCE_BRANCH" == "master" ]]; then + curl -d "text=A new production image has been pushed to Docker Hub" -d "channel=$SLACK_CHANNEL" -H "Authorization: Bearer $SLACK_TOKEN" -X POST https://slack.com/api/chat.postMessage +fi \ No newline at end of file diff --git a/frontend/package-lock.json b/frontend/package-lock.json index f33df493e7..7e80aaef7a 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -1,12 +1,12 @@ { "name": "intelowl", - "version": "6.1.0", + "version": "6.2.0", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "intelowl", - "version": "6.1.0", + "version": "6.2.0", "dependencies": { "@certego/certego-ui": "^0.1.13", "@dagrejs/dagre": "^1.1.4", diff --git a/frontend/package.json b/frontend/package.json index 95652e44d0..fed81859fb 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -1,6 +1,6 @@ { "name": "intelowl", - "version": "6.2.1", + "version": "6.3.0", "private": true, "proxy": "http://localhost:80/", "dependencies": { diff --git a/frontend/src/components/GuideWrapper.jsx b/frontend/src/components/GuideWrapper.jsx index 1006237a78..8a692e8640 100644 --- a/frontend/src/components/GuideWrapper.jsx +++ b/frontend/src/components/GuideWrapper.jsx @@ -3,6 +3,7 @@ import Joyride from "react-joyride"; import { Outlet, useNavigate } from "react-router-dom"; import { useMount } from "react-use"; import { useGuideContext } from "../contexts/GuideContext"; +import { INTELOWL_DOCS_URL } from "../constants/environment"; export default function GuideWrapper() { const { guideState, setGuideState } = useGuideContext(); @@ -17,8 +18,7 @@ export 
default function GuideWrapper() {

Welcome to IntelOwls Guide for First Time Visitors! For further questions you could either check out our{" "} - docs or reach - us out on{" "} + docs or reach us out on{" "} the official IntelOwl slack channel diff --git a/frontend/src/components/common/flows/getLayoutedElements.js b/frontend/src/components/common/flows/getLayoutedElements.js new file mode 100644 index 0000000000..b862a2cfe5 --- /dev/null +++ b/frontend/src/components/common/flows/getLayoutedElements.js @@ -0,0 +1,38 @@ +import dagre from "@dagrejs/dagre"; + +/* eslint-disable id-length */ +export function getLayoutedElements( + nodes, + edges, + nodeWidth, + nodeHeight, + deltaX, + deltaY, +) { + // needed for graph layout + const dagreGraph = new dagre.graphlib.Graph(); + dagreGraph.setDefaultEdgeLabel(() => ({})); + + dagreGraph.setGraph({ rankdir: "LR" }); + + nodes.forEach((node) => { + dagreGraph.setNode(node.id, { width: nodeWidth, height: nodeHeight }); + }); + + edges.forEach((edge) => { + dagreGraph.setEdge(edge.source, edge.target); + }); + + dagre.layout(dagreGraph); + + nodes.forEach((node) => { + const nodeWithPosition = dagreGraph.node(node.id); + // eslint-disable-next-line no-param-reassign + node.position = { + x: nodeWithPosition.x - nodeWidth / 2 + deltaX, + y: nodeWithPosition.y - nodeHeight / 2 + deltaY, + }; + return node; + }); + return { nodes, edges }; +} diff --git a/frontend/src/components/common/form/TLPSelectInput.jsx b/frontend/src/components/common/form/TLPSelectInput.jsx index abea221f18..1784975836 100644 --- a/frontend/src/components/common/form/TLPSelectInput.jsx +++ b/frontend/src/components/common/form/TLPSelectInput.jsx @@ -14,6 +14,7 @@ import { TLPDescriptions } from "../../../constants/miscConst"; import { TlpChoices } from "../../../constants/advancedSettingsConst"; import { TLPTag } from "../TLPTag"; import { TLPColors } from "../../../constants/colorConst"; +import { INTELOWL_DOCS_URL } from "../../../constants/environment"; export function 
TLPSelectInputLabel(props) { const { size } = props; @@ -36,7 +37,7 @@ export function TLPSelectInputLabel(props) {
For more info check the{" "} official doc. diff --git a/frontend/src/components/dashboard/charts.jsx b/frontend/src/components/dashboard/charts.jsx index f16f034571..95a8857a29 100644 --- a/frontend/src/components/dashboard/charts.jsx +++ b/frontend/src/components/dashboard/charts.jsx @@ -1,5 +1,6 @@ import React from "react"; import { Bar } from "recharts"; +import PropTypes from "prop-types"; import { getRandomColorsArray, AnyChartWidget } from "@certego/certego-ui"; @@ -215,3 +216,31 @@ export const JobTopTLPBarChart = React.memo((props) => { return ; }); + +JobStatusBarChart.propTypes = { + orgName: PropTypes.string.isRequired, +}; + +JobTypeBarChart.propTypes = { + orgName: PropTypes.string.isRequired, +}; + +JobObsClassificationBarChart.propTypes = { + orgName: PropTypes.string.isRequired, +}; + +JobFileMimetypeBarChart.propTypes = { + orgName: PropTypes.string.isRequired, +}; + +JobTopPlaybookBarChart.propTypes = { + orgName: PropTypes.string.isRequired, +}; + +JobTopUserBarChart.propTypes = { + orgName: PropTypes.string.isRequired, +}; + +JobTopTLPBarChart.propTypes = { + orgName: PropTypes.string.isRequired, +}; diff --git a/frontend/src/components/investigations/flow/utils.js b/frontend/src/components/investigations/flow/utils.js index 40939ff2f8..ec42dc5174 100644 --- a/frontend/src/components/investigations/flow/utils.js +++ b/frontend/src/components/investigations/flow/utils.js @@ -1,4 +1,4 @@ -import dagre from "@dagrejs/dagre"; +import { getLayoutedElements } from "../../common/flows/getLayoutedElements"; import { JobFinalStatuses } from "../../../constants/jobConst"; /* eslint-disable id-length */ @@ -53,38 +53,6 @@ function addEdge(edges, job, parentType, parentId) { } } -function getLayoutedElements(nodes, edges) { - // needed for graph layout - const dagreGraph = new dagre.graphlib.Graph(); - dagreGraph.setDefaultEdgeLabel(() => ({})); - - const nodeWidth = 300; - const nodeHeight = 60; - - dagreGraph.setGraph({ rankdir: "LR" }); - - 
nodes.forEach((node) => { - dagreGraph.setNode(node.id, { width: nodeWidth, height: nodeHeight }); - }); - - edges.forEach((edge) => { - dagreGraph.setEdge(edge.source, edge.target); - }); - - dagre.layout(dagreGraph); - - nodes.forEach((node) => { - const nodeWithPosition = dagreGraph.node(node.id); - // eslint-disable-next-line no-param-reassign - node.position = { - x: nodeWithPosition.x - nodeWidth / 2 + 150, - y: nodeWithPosition.y - nodeHeight / 2 + 70, - }; - return node; - }); - return { nodes, edges }; -} - export function getNodesAndEdges( investigationTree, investigationId, @@ -131,6 +99,10 @@ export function getNodesAndEdges( const { nodes: layoutedNodes, edges: layoutedEdges } = getLayoutedElements( jobsNodes, jobsEdges, + 300, + 60, + 150, + 70, ); return [ initialNode.concat(layoutedNodes), diff --git a/frontend/src/components/plugins/PluginConfigModal.jsx b/frontend/src/components/plugins/PluginConfigModal.jsx index ccfd444b05..17743b89f3 100644 --- a/frontend/src/components/plugins/PluginConfigModal.jsx +++ b/frontend/src/components/plugins/PluginConfigModal.jsx @@ -9,6 +9,7 @@ import { AnalyzerConfigForm } from "./forms/AnalyzerConfigForm"; import { PivotConfigForm } from "./forms/PivotConfigForm"; import { PlaybookConfigForm } from "./forms/PlaybookConfigForm"; import { PluginConfigContainer } from "./PluginConfigContainer"; +import { INTELOWL_DOCS_URL } from "../../constants/environment"; export function PluginConfigModal({ pluginConfig, @@ -45,7 +46,7 @@ export function PluginConfigModal({
For more info check the{" "} official doc. diff --git a/frontend/src/components/plugins/flows/CustomPivotNode.jsx b/frontend/src/components/plugins/flows/CustomPivotNode.jsx new file mode 100644 index 0000000000..0c5ace8fb1 --- /dev/null +++ b/frontend/src/components/plugins/flows/CustomPivotNode.jsx @@ -0,0 +1,89 @@ +import React from "react"; +import PropTypes from "prop-types"; +import { Handle, Position, NodeToolbar } from "reactflow"; +import "reactflow/dist/style.css"; +import { Badge } from "reactstrap"; +import { IoMdWarning } from "react-icons/io"; + +function CustomPivotNode({ data }) { + return ( + <> + + Pivot + {data?.warning && ( + + )} + + {/* Info */} + + {data?.warning && ( +

+ + + This pivot will always fail as the playbook to execute is not + configured or enabled + +
+ )} + + Analyzers: + {data?.analyzers || "-"} + + + Connectors: + {data?.connectors || "-"} + + + Type: + {data?.type} + + + Field to analyze: + {data?.fieldToCompare || "-"} + + +
+ {data?.label} +
+ + + + ); +} + +CustomPivotNode.propTypes = { + data: PropTypes.object.isRequired, +}; + +export default React.memo(CustomPivotNode); diff --git a/frontend/src/components/plugins/flows/CustomPlaybookNode.jsx b/frontend/src/components/plugins/flows/CustomPlaybookNode.jsx new file mode 100644 index 0000000000..058ec5b812 --- /dev/null +++ b/frontend/src/components/plugins/flows/CustomPlaybookNode.jsx @@ -0,0 +1,71 @@ +import React from "react"; +import PropTypes from "prop-types"; +import { Handle, Position, NodeToolbar } from "reactflow"; +import "reactflow/dist/style.css"; +import { Badge } from "reactstrap"; +import { IoMdWarning } from "react-icons/io"; + +function CustomPlaybookNode({ data }) { + return ( + <> + {/* Badge */} + + + Playbook + + + {/* Info */} + + {!data.configured && } + + {data?.description} + + +
+ {data?.label} +
+ + + + ); +} + +CustomPlaybookNode.propTypes = { + data: PropTypes.object.isRequired, +}; + +export default React.memo(CustomPlaybookNode); diff --git a/frontend/src/components/plugins/flows/PlaybookFlows.jsx b/frontend/src/components/plugins/flows/PlaybookFlows.jsx new file mode 100644 index 0000000000..23858991e9 --- /dev/null +++ b/frontend/src/components/plugins/flows/PlaybookFlows.jsx @@ -0,0 +1,82 @@ +/* eslint-disable id-length */ +import React from "react"; +import PropTypes from "prop-types"; +import ReactFlow, { Controls, useNodesState, useEdgesState } from "reactflow"; +import "reactflow/dist/style.css"; + +import CustomPlaybookNode from "./CustomPlaybookNode"; +import CustomPivotNode from "./CustomPivotNode"; +import { getNodesAndEdges } from "./utils"; +import { usePluginConfigurationStore } from "../../../stores/usePluginConfigurationStore"; + +// Important! This must be defined outside of the component +const nodeTypes = { + playbookNode: CustomPlaybookNode, + pivotNode: CustomPivotNode, +}; + +const defaultEdgeOptions = { + style: { strokeWidth: 3 }, + type: "step", +}; + +export function PlaybookFlows({ playbook }) { + const [nodes, setNodes, onNodesChange] = useNodesState([]); + const [edges, setEdges, onEdgesChange] = useEdgesState([]); + + // API/ store + const [pivotsLoading, pivotStored, playbooksLoading, playbooksStored] = + usePluginConfigurationStore((state) => [ + state.pivotsLoading, + state.pivots, + state.playbooksLoading, + state.playbooks, + ]); + + React.useEffect(() => { + if (!pivotsLoading && !playbooksLoading) { + const [initialNodes, initialEdges] = getNodesAndEdges( + playbook, + pivotStored, + playbooksStored, + ); + setNodes(initialNodes); + setEdges(initialEdges); + } + }, [ + playbook, + pivotsLoading, + playbooksLoading, + setNodes, + setEdges, + pivotStored, + playbooksStored, + ]); + + return ( +
+ + + +
+ ); +} + +PlaybookFlows.propTypes = { + playbook: PropTypes.object.isRequired, +}; diff --git a/frontend/src/components/plugins/flows/utils.js b/frontend/src/components/plugins/flows/utils.js new file mode 100644 index 0000000000..f758e52f3a --- /dev/null +++ b/frontend/src/components/plugins/flows/utils.js @@ -0,0 +1,160 @@ +import { MarkerType } from "reactflow"; +import { getLayoutedElements } from "../../common/flows/getLayoutedElements"; + +/* eslint-disable id-length */ +function addNode( + nodesList, + edgesList, + nodeType, + nodeToAdd, + pivotsStored, + playbooksStored, +) { + const node = { + id: `${nodeType}-${nodeToAdd.id}`, + data: { + id: nodeToAdd.id, + label: nodeToAdd.name, + }, + draggable: false, + }; + + if (nodeType === "pivot") { + node.type = "pivotNode"; + node.data = { + ...node.data, + analyzers: nodeToAdd?.related_analyzer_configs?.toString(), + connectors: nodeToAdd?.related_connector_configs?.toString(), + type: nodeToAdd?.python_module, + fieldToCompare: nodeToAdd?.params?.field_to_compare?.value, + }; + } else { + node.type = "playbookNode"; + node.data = { + ...node.data, + description: nodeToAdd?.description, + configured: nodeToAdd?.configured, + }; + } + nodesList.push(node); + + // recursive call if there are children + if (nodeToAdd.pivots) { + nodeToAdd.pivots.forEach((child) => { + const pivotConfig = pivotsStored.find((plugin) => plugin.name === child); + addNode( + nodesList, + edgesList, + "pivot", + pivotConfig, + pivotsStored, + playbooksStored, + ); + // add edge + edgesList.push({ + id: `edge-${nodeType}${nodeToAdd.id}-pivot${pivotConfig.id}`, + source: `${nodeType}-${nodeToAdd.id}`, + target: `pivot-${pivotConfig.id}`, + }); + }); + } else if (nodeToAdd.playbooks_choice) { + nodeToAdd.playbooks_choice.forEach((child) => { + let playbookConfig = {}; + playbookConfig = playbooksStored.find((plugin) => plugin.name === child); + if (playbookConfig === undefined) { + playbookConfig = {}; + playbookConfig.id = `${child}`; + 
playbookConfig.name = child; + playbookConfig.description = + "The playbook is not enabled or configured for this user."; + playbookConfig.pivots = []; + playbookConfig.configured = false; + + // set warning in the current pivot, it will always fail + const pivotIndex = nodesList.findIndex( + (pivotNode) => pivotNode.id === `${nodeType}-${nodeToAdd.id}`, + ); + const pivotNode = nodesList[pivotIndex]; + pivotNode.data.warning = true; + } else { + playbookConfig.configured = true; + } + addNode( + nodesList, + edgesList, + "playbook", + playbookConfig, + pivotsStored, + playbooksStored, + ); + // add edge + edgesList.push({ + id: `edge-${nodeType}${nodeToAdd.id}-playbook${playbookConfig.id}`, + source: `${nodeType}-${nodeToAdd.id}`, + target: `playbook-${playbookConfig.id}`, + markerEnd: { + type: MarkerType.ArrowClosed, + }, + }); + }); + } +} + +export function getNodesAndEdges(playbook, pivotsStored, playbooksStored) { + // nodes + const initialNode = [ + { + id: `playbook-${playbook.id}`, + position: { x: 0, y: 0 }, + data: { + id: playbook.id, + label: playbook.name, + description: playbook.description, + configured: true, + }, + type: "playbookNode", + draggable: false, + }, + ]; + const nodes = []; + + // edges + const initialEdges = []; + const edges = []; + + if (playbook.pivots.length) { + playbook.pivots.forEach((pivotToExecute) => { + const pivotConfig = pivotsStored.find( + (plugin) => plugin.name === pivotToExecute, + ); + addNode( + nodes, + edges, + "pivot", + pivotConfig, + pivotsStored, + playbooksStored, + ); + edges.push({ + id: `edge-playbook${playbook.id}-pivot${pivotConfig.id}`, + source: `playbook-${playbook.id}`, + target: `pivot-${pivotConfig.id}`, + }); + }); + } + + if (edges.length) { + const { nodes: layoutedNodes, edges: layoutedEdges } = getLayoutedElements( + initialNode.concat(nodes), + initialEdges.concat(edges), + 300, + 60, + 50, + 50, + ); + return [layoutedNodes, layoutedEdges]; + } + + initialNode[0].position = { x: 50, y: 30 
}; + return [initialNode, initialEdges]; +} diff --git a/frontend/src/components/plugins/forms/PluginConfigForm.jsx b/frontend/src/components/plugins/forms/PluginConfigForm.jsx index b734cf5c78..3edfed7200 100644 --- a/frontend/src/components/plugins/forms/PluginConfigForm.jsx +++ b/frontend/src/components/plugins/forms/PluginConfigForm.jsx @@ -50,6 +50,7 @@ function CustomInput({ formik, config, configType, disabledInputField }) { : "bg-darker border-0" } disabled={disabledInputField} + autoComplete={config.is_secret ? "off" : "on"} /> This field must be a number. @@ -70,6 +71,7 @@ function CustomInput({ formik, config, configType, disabledInputField }) { : "bg-darker border-0" } disabled={disabledInputField} + autoComplete={config.is_secret ? "off" : "on"} /> ); case ParameterTypes.STR: @@ -91,6 +93,7 @@ function CustomInput({ formik, config, configType, disabledInputField }) { : "bg-darker border-0" } disabled={disabledInputField} + autoComplete={config.is_secret ? "off" : "on"} /> ); case ParameterTypes.DICT: @@ -179,6 +182,7 @@ function CustomInput({ formik, config, configType, disabledInputField }) { }} value={value} disabled={disabledInputField} + autoComplete={config.is_secret ? 
"off" : "on"} /> diff --git a/frontend/src/constants/environment.js b/frontend/src/constants/environment.js index ba62a4da87..17f4fc68fd 100644 --- a/frontend/src/constants/environment.js +++ b/frontend/src/constants/environment.js @@ -3,6 +3,7 @@ export const INTELOWL_DOCS_URL = "https://intelowlproject.github.io/docs/"; export const PYINTELOWL_GH_URL = "https://github.com/intelowlproject/pyintelowl"; export const INTELOWL_TWITTER_ACCOUNT = "intel_owl"; +export const INTELOWL_REPO_URL = "https://github.com/intelowlproject/IntelOwl/"; // env variables export const VERSION = process.env.REACT_APP_INTELOWL_VERSION; diff --git a/frontend/tests/components/dashboard/charts.test.jsx b/frontend/tests/components/dashboard/charts.test.jsx index 2fb230daed..6a06ebebf4 100644 --- a/frontend/tests/components/dashboard/charts.test.jsx +++ b/frontend/tests/components/dashboard/charts.test.jsx @@ -17,6 +17,7 @@ jest.mock("recharts", () => { const OriginalModule = jest.requireActual("recharts"); return { ...OriginalModule, + // eslint-disable-next-line react/prop-types ResponsiveContainer: ({ children }) => ( {children} @@ -72,17 +73,19 @@ describe("test dashboard's charts", () => { // needed to support different timezones (ex: ci and local could be different) expect( screen.getByText( - `28/11, ${new Date("2024-11-28T22:00:00Z").getHours()}:00`, + `${new Date("2024-11-28T22:00:00Z").getDate()}/${new Date("2024-11-28T22:00:00Z").getMonth() + 1}, ${new Date("2024-11-28T22:00:00Z").getHours()}:00`, ), ).toBeInTheDocument(); expect( screen.getByText( - `29/11, ${new Date("2024-11-29T22:00:00Z").getHours()}:00`, + `${new Date("2024-11-29T22:00:00Z").getDate()}/${new Date("2024-11-29T22:00:00Z").getMonth() + 1}, ${new Date("2024-11-29T22:00:00Z").getHours()}:00`, ), ).toBeInTheDocument(); + let hours = new Date("2024-11-29T23:00:00Z").getHours(); + if (hours === 0) hours = "00"; expect( screen.getByText( - `29/11, ${new Date("2024-11-29T23:00:00Z").getHours()}:00`, + `${new 
Date("2024-11-29T23:00:00Z").getDate()}/${new Date("2024-11-29T23:00:00Z").getMonth() + 1}, ${hours}:00`, ), ).toBeInTheDocument(); expect(screen.getByText("pending")).toBeInTheDocument(); @@ -136,17 +139,19 @@ describe("test dashboard's charts", () => { // needed to support different timezones (ex: ci and local could be different) expect( screen.getByText( - `28/11, ${new Date("2024-11-28T22:00:00Z").getHours()}:00`, + `${new Date("2024-11-28T22:00:00Z").getDate()}/${new Date("2024-11-28T22:00:00Z").getMonth() + 1}, ${new Date("2024-11-28T22:00:00Z").getHours()}:00`, ), ).toBeInTheDocument(); expect( screen.getByText( - `29/11, ${new Date("2024-11-29T22:00:00Z").getHours()}:00`, + `${new Date("2024-11-29T22:00:00Z").getDate()}/${new Date("2024-11-29T22:00:00Z").getMonth() + 1}, ${new Date("2024-11-29T22:00:00Z").getHours()}:00`, ), ).toBeInTheDocument(); + let hours = new Date("2024-11-29T23:00:00Z").getHours(); + if (hours === 0) hours = "00"; expect( screen.getByText( - `29/11, ${new Date("2024-11-29T23:00:00Z").getHours()}:00`, + `${new Date("2024-11-29T23:00:00Z").getDate()}/${new Date("2024-11-29T23:00:00Z").getMonth() + 1}, ${hours}:00`, ), ).toBeInTheDocument(); expect(screen.getByText("file")).toBeInTheDocument(); @@ -207,17 +212,19 @@ describe("test dashboard's charts", () => { // needed to support different timezones (ex: ci and local could be different) expect( screen.getByText( - `28/11, ${new Date("2024-11-28T22:00:00Z").getHours()}:00`, + `${new Date("2024-11-28T22:00:00Z").getDate()}/${new Date("2024-11-28T22:00:00Z").getMonth() + 1}, ${new Date("2024-11-28T22:00:00Z").getHours()}:00`, ), ).toBeInTheDocument(); expect( screen.getByText( - `29/11, ${new Date("2024-11-29T22:00:00Z").getHours()}:00`, + `${new Date("2024-11-29T22:00:00Z").getDate()}/${new Date("2024-11-29T22:00:00Z").getMonth() + 1}, ${new Date("2024-11-29T22:00:00Z").getHours()}:00`, ), ).toBeInTheDocument(); + let hours = new Date("2024-11-29T23:00:00Z").getHours(); + if (hours === 0) 
hours = "00"; expect( screen.getByText( - `29/11, ${new Date("2024-11-29T23:00:00Z").getHours()}:00`, + `${new Date("2024-11-29T23:00:00Z").getDate()}/${new Date("2024-11-29T23:00:00Z").getMonth() + 1}, ${hours}:00`, ), ).toBeInTheDocument(); expect(screen.getByText("ip")).toBeInTheDocument(); @@ -275,17 +282,19 @@ describe("test dashboard's charts", () => { // needed to support different timezones (ex: ci and local could be different) expect( screen.getByText( - `28/11, ${new Date("2024-11-28T22:00:00Z").getHours()}:00`, + `${new Date("2024-11-28T22:00:00Z").getDate()}/${new Date("2024-11-28T22:00:00Z").getMonth() + 1}, ${new Date("2024-11-28T22:00:00Z").getHours()}:00`, ), ).toBeInTheDocument(); expect( screen.getByText( - `29/11, ${new Date("2024-11-29T22:00:00Z").getHours()}:00`, + `${new Date("2024-11-29T22:00:00Z").getDate()}/${new Date("2024-11-29T22:00:00Z").getMonth() + 1}, ${new Date("2024-11-29T22:00:00Z").getHours()}:00`, ), ).toBeInTheDocument(); + let hours = new Date("2024-11-29T23:00:00Z").getHours(); + if (hours === 0) hours = "00"; expect( screen.getByText( - `29/11, ${new Date("2024-11-29T23:00:00Z").getHours()}:00`, + `${new Date("2024-11-29T23:00:00Z").getDate()}/${new Date("2024-11-29T23:00:00Z").getMonth() + 1}, ${hours}:00`, ), ).toBeInTheDocument(); expect(screen.getByText("application/json")).toBeInTheDocument(); @@ -343,17 +352,19 @@ describe("test dashboard's charts", () => { // needed to support different timezones (ex: ci and local could be different) expect( screen.getByText( - `28/11, ${new Date("2024-11-28T22:00:00Z").getHours()}:00`, + `${new Date("2024-11-28T22:00:00Z").getDate()}/${new Date("2024-11-28T22:00:00Z").getMonth() + 1}, ${new Date("2024-11-28T22:00:00Z").getHours()}:00`, ), ).toBeInTheDocument(); expect( screen.getByText( - `29/11, ${new Date("2024-11-29T22:00:00Z").getHours()}:00`, + `${new Date("2024-11-29T22:00:00Z").getDate()}/${new Date("2024-11-29T22:00:00Z").getMonth() + 1}, ${new 
Date("2024-11-29T22:00:00Z").getHours()}:00`, ), ).toBeInTheDocument(); + let hours = new Date("2024-11-29T23:00:00Z").getHours(); + if (hours === 0) hours = "00"; expect( screen.getByText( - `29/11, ${new Date("2024-11-29T23:00:00Z").getHours()}:00`, + `${new Date("2024-11-29T23:00:00Z").getDate()}/${new Date("2024-11-29T23:00:00Z").getMonth() + 1}, ${hours}:00`, ), ).toBeInTheDocument(); expect(screen.getByText("Dns")).toBeInTheDocument(); @@ -412,17 +423,19 @@ describe("test dashboard's charts", () => { // needed to support different timezones (ex: ci and local could be different) expect( screen.getByText( - `28/11, ${new Date("2024-11-28T22:00:00Z").getHours()}:00`, + `${new Date("2024-11-28T22:00:00Z").getDate()}/${new Date("2024-11-28T22:00:00Z").getMonth() + 1}, ${new Date("2024-11-28T22:00:00Z").getHours()}:00`, ), ).toBeInTheDocument(); expect( screen.getByText( - `29/11, ${new Date("2024-11-29T22:00:00Z").getHours()}:00`, + `${new Date("2024-11-29T22:00:00Z").getDate()}/${new Date("2024-11-29T22:00:00Z").getMonth() + 1}, ${new Date("2024-11-29T22:00:00Z").getHours()}:00`, ), ).toBeInTheDocument(); + let hours = new Date("2024-11-29T23:00:00Z").getHours(); + if (hours === 0) hours = "00"; expect( screen.getByText( - `29/11, ${new Date("2024-11-29T23:00:00Z").getHours()}:00`, + `${new Date("2024-11-29T23:00:00Z").getDate()}/${new Date("2024-11-29T23:00:00Z").getMonth() + 1}, ${hours}:00`, ), ).toBeInTheDocument(); expect(screen.getByText("user_a")).toBeInTheDocument(); @@ -481,17 +494,19 @@ describe("test dashboard's charts", () => { // needed to support different timezones (ex: ci and local could be different) expect( screen.getByText( - `28/11, ${new Date("2024-11-28T22:00:00Z").getHours()}:00`, + `${new Date("2024-11-28T22:00:00Z").getDate()}/${new Date("2024-11-28T22:00:00Z").getMonth() + 1}, ${new Date("2024-11-28T22:00:00Z").getHours()}:00`, ), ).toBeInTheDocument(); expect( screen.getByText( - `29/11, ${new 
Date("2024-11-29T22:00:00Z").getHours()}:00`, + `${new Date("2024-11-29T22:00:00Z").getDate()}/${new Date("2024-11-29T22:00:00Z").getMonth() + 1}, ${new Date("2024-11-29T22:00:00Z").getHours()}:00`, ), ).toBeInTheDocument(); + let hours = new Date("2024-11-29T23:00:00Z").getHours(); + if (hours === 0) hours = "00"; expect( screen.getByText( - `29/11, ${new Date("2024-11-29T23:00:00Z").getHours()}:00`, + `${new Date("2024-11-29T23:00:00Z").getDate()}/${new Date("2024-11-29T23:00:00Z").getMonth() + 1}, ${hours}:00`, ), ).toBeInTheDocument(); expect(screen.getByText("AMBER")).toBeInTheDocument(); diff --git a/frontend/tests/components/plugins/PluginsContainers.test.jsx b/frontend/tests/components/plugins/PluginsContainers.test.jsx index 49b3c1086b..97181245a6 100644 --- a/frontend/tests/components/plugins/PluginsContainers.test.jsx +++ b/frontend/tests/components/plugins/PluginsContainers.test.jsx @@ -10,6 +10,7 @@ import { mockedUsePluginConfigurationStore, } from "../../mock"; +jest.mock("reactflow/dist/style.css", () => {}); jest.mock("axios"); jest.mock("../../../src/stores/useOrganizationStore", () => ({ useOrganizationStore: jest.fn((state) => diff --git a/frontend/tests/components/plugins/flows/PlaybookFlows.test.jsx b/frontend/tests/components/plugins/flows/PlaybookFlows.test.jsx new file mode 100644 index 0000000000..a6b657188e --- /dev/null +++ b/frontend/tests/components/plugins/flows/PlaybookFlows.test.jsx @@ -0,0 +1,170 @@ +/* eslint-disable id-length */ +import React from "react"; +import "@testing-library/jest-dom"; +import { render, screen } from "@testing-library/react"; +import { BrowserRouter } from "react-router-dom"; +import { PlaybookFlows } from "../../../../src/components/plugins/flows/PlaybookFlows"; +import { + mockedUsePluginConfigurationStore, + mockedPlaybooks, +} from "../../../mock"; + +jest.mock("reactflow/dist/style.css", () => {}); + +jest.mock("../../../../src/stores/usePluginConfigurationStore", () => ({ + 
usePluginConfigurationStore: jest.fn((state) => + state(mockedUsePluginConfigurationStore), + ), +})); + +describe("test PlaybookFlows", () => { + // mock needed for testing flow https://reactflow.dev/learn/advanced-use/testing#using-jest + beforeEach(() => { + let MockObserverInstance = typeof ResizeObserver; + MockObserverInstance = { + observe: jest.fn(), + unobserve: jest.fn(), + disconnect: jest.fn(), + }; + global.ResizeObserver = jest + .fn() + .mockImplementation(() => MockObserverInstance); + + let MockDOMMatrixInstance = typeof DOMMatrixReadOnly; + const mockDOMMatrix = (transform) => { + const scale = transform?.match(/scale\(([1-9.])\)/)?.[1]; + MockDOMMatrixInstance = { + m22: scale !== undefined ? +scale : 1, + }; + return MockDOMMatrixInstance; + }; + global.DOMMatrixReadOnly = jest + .fn() + .mockImplementation((transform) => mockDOMMatrix(transform)); + + Object.defineProperties(global.HTMLElement.prototype, { + offsetHeight: { + get() { + return parseFloat(this.style.height) || 1; + }, + }, + offsetWidth: { + get() { + return parseFloat(this.style.width) || 1; + }, + }, + }); + + global.SVGElement.prototype.getBBox = () => ({ + x: 0, + y: 0, + width: 0, + height: 0, + }); + }); + + test("PlaybookFlows - only root (playbook)", () => { + const { container } = render( + + + , + ); + // Root node + const rootNode = container.querySelector("#playbook-5"); + expect(rootNode).toBeInTheDocument(); + expect(rootNode.textContent).toBe("TEST_PLAYBOOK_FILE"); + const playbookBadge = screen.getByText("Playbook"); + expect(playbookBadge).toBeInTheDocument(); + expect(playbookBadge.className).toContain("badge bg-secondary"); + }); + + test("PlaybookFlows - playbook + pivot + playbook", () => { + const { container } = render( + + + , + ); + // Root node (playbook) + const rootNode = container.querySelector("#playbook-2"); + expect(rootNode).toBeInTheDocument(); + expect(rootNode.textContent).toBe("TEST_PLAYBOOK_DOMAIN"); + const playbookBadge = 
screen.getAllByText("Playbook")[0]; + expect(playbookBadge).toBeInTheDocument(); + expect(playbookBadge.className).toContain("badge bg-secondary"); + // pivot node + const pivotNode = container.querySelector("#pivot-13"); + expect(pivotNode).toBeInTheDocument(); + expect(pivotNode.textContent).toBe("TEST_PIVOT"); + const pivotBadge = screen.getByText("Pivot"); + expect(pivotBadge).toBeInTheDocument(); + expect(pivotBadge.className).toContain("bg-advisory badge"); + // second playbook + const secondPlaybookNode = container.querySelector("#playbook-1"); + expect(secondPlaybookNode).toBeInTheDocument(); + expect(secondPlaybookNode.textContent).toBe("TEST_PLAYBOOK_IP"); + const secondPlaybookBadge = screen.getAllByText("Playbook")[1]; + expect(secondPlaybookBadge).toBeInTheDocument(); + expect(secondPlaybookBadge.className).toContain("badge bg-secondary"); + }); + + test("PlaybookFlows - playbook + pivot + playbook not configured", () => { + mockedUsePluginConfigurationStore.pivots.push({ + id: 3, + name: "TEST_PIVOT_ERROR", + description: "pivot: test", + python_module: "self_analyzable.SelfAnalyzable", + playbooks_choice: ["NO_CONFIGURED_PLAYBOOK"], + disabled: false, + soft_time_limit: 60, + routing_key: "default", + health_check_status: true, + delay: "00:00:00", + health_check_task: null, + config: { + queue: "default", + soft_time_limit: 60, + }, + related_analyzer_configs: ["TEST_ANALYZER"], + secrets: {}, + params: {}, + verification: { + configured: true, + details: "Ready to use!", + missing_secrets: [], + }, + }); + mockedPlaybooks.TEST_PLAYBOOK_DOMAIN.pivots = ["TEST_PIVOT_ERROR"]; + + const { container } = render( + + + , + ); + // Root node (playbook) + const rootNode = container.querySelector("#playbook-2"); + expect(rootNode).toBeInTheDocument(); + expect(rootNode.textContent).toBe("TEST_PLAYBOOK_DOMAIN"); + const playbookBadge = screen.getAllByText("Playbook")[0]; + expect(playbookBadge).toBeInTheDocument(); + 
expect(playbookBadge.className).toContain("badge bg-secondary"); + // pivot node + const pivotNode = container.querySelector("#pivot-3"); + expect(pivotNode).toBeInTheDocument(); + expect(pivotNode.textContent).toBe("TEST_PIVOT_ERROR"); + const pivotBadge = screen.getByText("Pivot"); + expect(pivotBadge).toBeInTheDocument(); + expect(pivotBadge.className).toContain("bg-advisory badge"); + const pivotWarningIcon = container.querySelector("#pivot-warning-icon"); + expect(pivotWarningIcon).toBeInTheDocument(); + // second playbook + const secondPlaybookNode = container.querySelector( + "#playbook-NO_CONFIGURED_PLAYBOOK", + ); + expect(secondPlaybookNode).toBeInTheDocument(); + expect(secondPlaybookNode.textContent).toBe("NO_CONFIGURED_PLAYBOOK"); + const secondPlaybookBadge = screen.getAllByText("Playbook")[1]; + expect(secondPlaybookBadge).toBeInTheDocument(); + expect(secondPlaybookBadge.className).toContain("badge bg-secondary"); + expect(secondPlaybookBadge).toHaveStyle(`opacity: 0.5`); + }); +}); diff --git a/frontend/tests/components/plugins/forms/PivotConfigForm.test.jsx b/frontend/tests/components/plugins/forms/PivotConfigForm.test.jsx index 7f1e18aabe..3c6a2075c0 100644 --- a/frontend/tests/components/plugins/forms/PivotConfigForm.test.jsx +++ b/frontend/tests/components/plugins/forms/PivotConfigForm.test.jsx @@ -281,7 +281,7 @@ describe("PivotConfigForm test", () => { { name: "myNewPivot", python_module: "self_analyzable.SelfAnalyzable", - playbooks_choice: ["DNS"], + playbooks_choice: ["TEST_PLAYBOOK_IP"], related_analyzer_configs: ["TEST_ANALYZER"], related_connector_configs: [], }, diff --git a/frontend/tests/components/plugins/tables/Analyzers.test.jsx b/frontend/tests/components/plugins/tables/Analyzers.test.jsx index 8c7b3b11b5..59b148bf0d 100644 --- a/frontend/tests/components/plugins/tables/Analyzers.test.jsx +++ b/frontend/tests/components/plugins/tables/Analyzers.test.jsx @@ -11,6 +11,7 @@ import { mockedUsePluginConfigurationStore, } from 
"../../../mock"; +jest.mock("reactflow/dist/style.css", () => {}); jest.mock("axios"); jest.mock("../../../../src/stores/useAuthStore", () => ({ useAuthStore: jest.fn((state) => state(mockedUseAuthStore)), diff --git a/frontend/tests/components/plugins/tables/Connectors.test.jsx b/frontend/tests/components/plugins/tables/Connectors.test.jsx index bccb46ae78..e66c363849 100644 --- a/frontend/tests/components/plugins/tables/Connectors.test.jsx +++ b/frontend/tests/components/plugins/tables/Connectors.test.jsx @@ -9,6 +9,7 @@ import { mockedUsePluginConfigurationStore, } from "../../../mock"; +jest.mock("reactflow/dist/style.css", () => {}); jest.mock("axios"); jest.mock("../../../../src/stores/useOrganizationStore", () => ({ useOrganizationStore: jest.fn((state) => diff --git a/frontend/tests/components/plugins/tables/Ingestors.test.jsx b/frontend/tests/components/plugins/tables/Ingestors.test.jsx index 18ec208f28..d9abce5b96 100644 --- a/frontend/tests/components/plugins/tables/Ingestors.test.jsx +++ b/frontend/tests/components/plugins/tables/Ingestors.test.jsx @@ -6,6 +6,7 @@ import Ingestors from "../../../../src/components/plugins/tables/Ingestors"; import { mockedUsePluginConfigurationStore } from "../../../mock"; +jest.mock("reactflow/dist/style.css", () => {}); jest.mock("axios"); jest.mock("../../../../src/stores/useOrganizationStore", () => ({ useOrganizationStore: jest.fn((state) => diff --git a/frontend/tests/components/plugins/tables/Pivots.test.jsx b/frontend/tests/components/plugins/tables/Pivots.test.jsx index 765af33231..c9390db4d1 100644 --- a/frontend/tests/components/plugins/tables/Pivots.test.jsx +++ b/frontend/tests/components/plugins/tables/Pivots.test.jsx @@ -9,6 +9,7 @@ import { mockedUsePluginConfigurationStore, } from "../../../mock"; +jest.mock("reactflow/dist/style.css", () => {}); jest.mock("axios"); jest.mock("../../../../src/stores/useOrganizationStore", () => ({ useOrganizationStore: jest.fn((state) => @@ -29,7 +30,7 @@ describe("test 
Pivots component", () => { , ); - const title = screen.getByRole("heading", { name: "Pivots 0 total" }); + const title = screen.getByRole("heading", { name: "Pivots 1 total" }); expect(title).toBeInTheDocument(); // table const tableComponent = screen.getByRole("table"); diff --git a/frontend/tests/components/plugins/tables/Playbooks.test.jsx b/frontend/tests/components/plugins/tables/Playbooks.test.jsx index 69b70b8884..cf55d92b70 100644 --- a/frontend/tests/components/plugins/tables/Playbooks.test.jsx +++ b/frontend/tests/components/plugins/tables/Playbooks.test.jsx @@ -10,6 +10,7 @@ import { mockedUsePluginConfigurationStore, } from "../../../mock"; +jest.mock("reactflow/dist/style.css", () => {}); jest.mock("axios"); jest.mock("../../../../src/stores/useAuthStore", () => ({ useAuthStore: jest.fn((state) => state(mockedUseAuthStore)), diff --git a/frontend/tests/components/plugins/tables/Visualizers.test.jsx b/frontend/tests/components/plugins/tables/Visualizers.test.jsx index 8d76850c86..4fe3fdec70 100644 --- a/frontend/tests/components/plugins/tables/Visualizers.test.jsx +++ b/frontend/tests/components/plugins/tables/Visualizers.test.jsx @@ -9,6 +9,7 @@ import { mockedUsePluginConfigurationStore, } from "../../../mock"; +jest.mock("reactflow/dist/style.css", () => {}); jest.mock("axios"); jest.mock("../../../../src/stores/useOrganizationStore", () => ({ useOrganizationStore: jest.fn((state) => diff --git a/frontend/tests/components/plugins/tables/pluginActionsButtons.test.jsx b/frontend/tests/components/plugins/tables/pluginActionsButtons.test.jsx index dc65206f57..2937e83398 100644 --- a/frontend/tests/components/plugins/tables/pluginActionsButtons.test.jsx +++ b/frontend/tests/components/plugins/tables/pluginActionsButtons.test.jsx @@ -13,8 +13,13 @@ import { PluginPullButton, PlaybooksEditButton, PluginConfigButton, + PlaybookFlowsButton, + MappingDataModel, } from "../../../../src/components/plugins/tables/pluginActionsButtons"; -import { 
mockedUseOrganizationStoreOwner } from "../../../mock"; +import { + mockedUseOrganizationStoreOwner, + mockedPlaybooks, +} from "../../../mock"; jest.mock("axios"); jest.mock("../../../../src/stores/useOrganizationStore", () => ({ @@ -22,6 +27,10 @@ jest.mock("../../../../src/stores/useOrganizationStore", () => ({ state(mockedUseOrganizationStoreOwner), ), })); +// mock flow component +jest.mock("../../../../src/components/plugins/flows/PlaybookFlows", () => ({ + PlaybookFlows: jest.fn((props) =>
), +})); // current user must be equal to org owner jest.mock("../../../../src/stores/useAuthStore", () => ({ @@ -456,3 +465,80 @@ describe("Plugin Config test", () => { }, ); }); + +describe("PlaybookFlowsButton test", () => { + test("PlaybookFlowsButton", async () => { + const userAction = userEvent.setup(); + const { container } = render( + + + , + ); + + const playbookFlowsIcon = container.querySelector( + "#playbook-flows-btn__TEST_PLAYBOOK_DOMAIN", + ); + expect(playbookFlowsIcon).toBeInTheDocument(); + + userAction.click(playbookFlowsIcon); + await waitFor(() => { + expect(screen.getByText("Possible playbook flows")).toBeInTheDocument(); + }); + }); +}); + +describe("DataModel mapping test", () => { + test("DataModel mapping button", async () => { + const userAction = userEvent.setup(); + const data = { + mapping_data_model: { + permalink: "external_references", + "data.hostnames": "resolutions", + }, + type: "observable", + python_module: "pythonmodule.pythonclass", + }; + const { container } = render( + + + , + ); + + const dataModelMappingIcon = container.querySelector( + "#mapping-data-model__pythonmodule", + ); + expect(dataModelMappingIcon).toBeInTheDocument(); + + userAction.click(dataModelMappingIcon); + await waitFor(() => { + expect(screen.getByText("Data model mapping")).toBeInTheDocument(); + }); + }); + + test("DataModel mapping button - disabled", async () => { + const data = { + mapping_data_model: {}, + type: "observable", + python_module: "pythonmodule.pythonclass", + }; + const { container } = render( + + + , + ); + + const dataModelMappingIcon = container.querySelector( + "#mapping-data-model__pythonmodule", + ); + expect(dataModelMappingIcon).toBeInTheDocument(); + expect(dataModelMappingIcon.className).toContain("disabled"); + }); +}); diff --git a/frontend/tests/mock.js b/frontend/tests/mock.js index e646fdebc0..f518568a81 100644 --- a/frontend/tests/mock.js +++ b/frontend/tests/mock.js @@ -181,6 +181,7 @@ export const mockedPlaybooks = 
{ }, analyzers: [], connectors: [], + pivots: [], scan_mode: 2, scan_check_time: "02:00:00:00", tags: [ @@ -192,6 +193,7 @@ export const mockedPlaybooks = { ], tlp: "CLEAR", starting: true, + id: 1, }, TEST_PLAYBOOK_DOMAIN: { name: "TEST_PLAYBOOK_DOMAIN", @@ -205,11 +207,13 @@ export const mockedPlaybooks = { }, analyzers: [], connectors: [], + pivots: ["TEST_PIVOT"], scan_mode: 2, scan_check_time: "02:00:00:00", tags: [], tlp: "CLEAR", starting: true, + id: 2, }, TEST_PLAYBOOK_URL: { name: "TEST_PLAYBOOK_URL", @@ -223,11 +227,13 @@ export const mockedPlaybooks = { }, analyzers: [], connectors: [], + pivots: [], scan_mode: 1, scan_check_time: null, tags: [], tlp: "AMBER", starting: true, + id: 3, }, TEST_PLAYBOOK_HASH: { name: "TEST_PLAYBOOK_HASH", @@ -241,11 +247,13 @@ export const mockedPlaybooks = { }, analyzers: [], connectors: [], + pivots: [], scan_mode: 1, scan_check_time: null, tags: [], tlp: "AMBER", starting: true, + id: 4, }, TEST_PLAYBOOK_FILE: { name: "TEST_PLAYBOOK_FILE", @@ -259,11 +267,13 @@ export const mockedPlaybooks = { }, analyzers: [], connectors: [], + pivots: [], scan_mode: 1, scan_check_time: null, tags: [], tlp: "AMBER", starting: true, + id: 5, }, TEST_PLAYBOOK_GENERIC: { name: "TEST_PLAYBOOK_GENERIC", @@ -277,11 +287,13 @@ export const mockedPlaybooks = { }, analyzers: [], connectors: [], + pivots: [], scan_mode: 1, scan_check_time: null, tags: [], tlp: "AMBER", starting: true, + id: 6, }, }; @@ -303,6 +315,7 @@ export const mockedPlugins = { run_hash: false, run_hash_type: "", not_supported_filetypes: [], + mapping_data_model: {}, params: { query_type: { type: "str", @@ -353,7 +366,7 @@ export const mockedPlugins = { name: "TEST_PIVOT", description: "pivot: test", python_module: "self_analyzable.SelfAnalyzable", - playbooks_choice: ["DNS"], + playbooks_choice: ["TEST_PLAYBOOK_IP"], disabled: false, soft_time_limit: 60, routing_key: "default", @@ -388,7 +401,7 @@ export const mockedUsePluginConfigurationStore = { visualizersError: null, 
analyzers: [mockedPlugins.ANALYZER], connectors: [mockedPlugins.CONNECTOR], - pivots: [], + pivots: [mockedPlugins.PIVOT], visualizers: [], ingestors: [], playbooks: [ diff --git a/integrations/cyberchef/Dockerfile b/integrations/cyberchef/Dockerfile index 71387b1e1d..ce39ed7211 100644 --- a/integrations/cyberchef/Dockerfile +++ b/integrations/cyberchef/Dockerfile @@ -1,5 +1,5 @@ # Adapted from https://github.com/gchq/CyberChef-server/blob/master/Dockerfile -FROM node:alpine3.10 +FROM node:lts-alpine3.21 RUN apk update && apk --no-cache add git LABEL author = "Wes Lambert, wlambertts@gmail.com" LABEL description="Dockerised version of Cyberchef server (https://github.com/gchq/CyberChef-server)" diff --git a/integrations/cyberchef/hooks/build b/integrations/cyberchef/hooks/build new file mode 100644 index 0000000000..3dc2ddf066 --- /dev/null +++ b/integrations/cyberchef/hooks/build @@ -0,0 +1,18 @@ +#!/bin/bash + +echo "display path" +echo "$(pwd)" +echo "display dockerfile path" +echo $DOCKERFILE_PATH +echo "current branch" +echo "$SOURCE_BRANCH" + +version_regex='^v[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$' +if [[ "$SOURCE_BRANCH" == "master" || "$SOURCE_BRANCH" =~ $version_regex ]]; then + echo "The branch is master, proceeding with multi-arch build" + docker buildx create --name multiarch --use + docker buildx build -f "$DOCKERFILE_PATH" -t "$IMAGE_NAME" --platform linux/arm64,linux/amd64 --push . +else + echo "The branch is not master, proceeding with classic build" + docker buildx build -f "$DOCKERFILE_PATH" -t "$IMAGE_NAME" --push . 
+fi \ No newline at end of file diff --git a/integrations/malware_tools_analyzers/Dockerfile b/integrations/malware_tools_analyzers/Dockerfile index aafcbbdab1..37012d120f 100644 --- a/integrations/malware_tools_analyzers/Dockerfile +++ b/integrations/malware_tools_analyzers/Dockerfile @@ -1,37 +1,35 @@ -# this base was derived from Thug requirements: -# https://github.com/REMnux/docker/blob/master/thug/Dockerfile -FROM python:3.9-slim +FROM python:3.11-slim -ENV PROJECT_PATH /opt/deploy -ENV LOG_PATH /var/log/intel_owl/malware_tools_analyzers -ENV USER malware_tools_analyzers-user +ARG TARGETARCH +RUN echo "TargetArch is $TARGETARCH" + +ENV PROJECT_PATH=/opt/deploy +ENV LOG_PATH=/var/log/intel_owl/malware_tools_analyzers +ENV USER=malware_tools_analyzers-user # update and install packages # line 3: ClamAV deps # line 4: Box-JS deps -# lines 5-6: Thug deps RUN DEBIAN_FRONTEND=noninteractive apt-get update -qq \ - && apt-get install -y --no-install-recommends wget git libssl3 swig g++ make libssl-dev libmagic1 vim \ + && apt-get install -y --no-install-recommends wget git libssl3 swig g++ make libssl-dev libmagic1 vim unzip \ clamav clamdscan clamav-daemon clamav-freshclam \ - nodejs npm gcc m4 \ - build-essential python3-dev python3-setuptools python3-wheel python-is-python3 libboost-dev libboost-iostreams-dev libboost-python-dev libboost-system-dev python3-pip libxml2-dev \ - libxslt-dev tesseract-ocr unzip libtool graphviz-dev automake libffi-dev graphviz libfuzzy-dev libfuzzy2 libjpeg-dev libffi-dev pkg-config clang autoconf + nodejs npm gcc m4 # Add a new low-privileged user RUN useradd -ms /bin/bash ${USER} \ - && mkdir ${PROJECT_PATH} ${PROJECT_PATH}/thug ${PROJECT_PATH}/qiling ${PROJECT_PATH}/stringsifter ${PROJECT_PATH}/peframe ${PROJECT_PATH}/apkid + && mkdir ${PROJECT_PATH} ${PROJECT_PATH}/qiling ${PROJECT_PATH}/stringsifter ${PROJECT_PATH}/peframe ${PROJECT_PATH}/apkid WORKDIR ${PROJECT_PATH} # Install Box-js -RUN npm install box-js@1.9.17 --global 
--production \ +RUN npm install box-js@1.9.27 --global --production \ && mkdir -p /tmp/boxjs \ && chown -R ${USER}:${USER} /tmp/boxjs # Install CAPA WORKDIR ${PROJECT_PATH}/capa -RUN wget -q https://github.com/mandiant/capa/releases/download/v8.0.0/capa-v8.0.0-linux.zip \ - && unzip capa-v8.0.0-linux.zip \ +RUN wget -q https://github.com/mandiant/capa/releases/download/v9.0.0/capa-v9.0.0-linux.zip \ + && unzip capa-v9.0.0-linux.zip \ && ln -s ${PROJECT_PATH}/capa/capa /usr/local/bin/capa # Install Floss @@ -61,10 +59,12 @@ RUN python3 -m venv venv \ # Build Qiling WORKDIR ${PROJECT_PATH}/qiling COPY requirements/qiling-requirements.txt qiling/analyze.py ./ -RUN python3 -m venv venv \ +# keystone-engine does not compile for ARM +RUN if [[ $TARGETARCH == "amd" ]]; then \ + python3 -m venv venv \ && . venv/bin/activate \ && pip3 install --no-cache-dir --upgrade pip \ - && pip3 install --no-cache-dir -r qiling-requirements.txt + && pip3 install --no-cache-dir -r qiling-requirements.txt; fi # Build APKiD WORKDIR ${PROJECT_PATH}/apkid @@ -76,8 +76,11 @@ RUN python3 -m venv venv \ # Install GoReSym WORKDIR ${PROJECT_PATH}/goresym -RUN wget -q https://github.com/mandiant/GoReSym/releases/download/v2.7.4/GoReSym-linux.zip \ - && unzip GoReSym-linux.zip \ +RUN if [[ $TARGETARCH == "amd" ]]; \ + then export GORESYM_ARCH="linux"; \ + else export GORESYM_ARCH="mac"; fi \ + && wget -q "https://github.com/mandiant/GoReSym/releases/download/v3.0.2/GoReSym-$GORESYM_ARCH.zip" \ + && unzip GoReSym-$GORESYM_ARCH.zip \ && chmod +x GoReSym \ && ln -s ${PROJECT_PATH}/goresym/GoReSym /usr/local/bin/goresym @@ -105,6 +108,7 @@ RUN python3 -m venv venv \ COPY ./droidlysis/general.conf ${PROJECT_PATH}/droidlysis/conf/general.conf # Install artifacts +# there is no version management on this project so we just pull the most recent changes WORKDIR ${PROJECT_PATH}/artifacts RUN python3 -m venv venv \ && . 
venv/bin/activate \ @@ -112,13 +116,7 @@ RUN python3 -m venv venv \ && git clone https://github.com/guelfoweb/artifacts.git \ && cd artifacts \ && pip install --no-cache-dir -r requirements.txt \ - && chmod +x artifacts.py - -# Install Detect-it-Easy -WORKDIR ${PROJECT_PATH}/die -RUN apt-get install --no-install-recommends -y wget tar libglib2.0-0 && \ - wget -q https://github.com/horsicq/DIE-engine/releases/download/3.01/die_lin64_portable_3.01.tar.gz && \ - tar -xzf die_lin64_portable_3.01.tar.gz + && chmod +x artifacts.py # MobSF WORKDIR ${PROJECT_PATH}/mobsf @@ -131,27 +129,6 @@ RUN python3 -m venv venv \ && chown -R ${USER}:${USER} /root/.semgrep \ && chmod 711 /root -# Install Thug -# https://github.com/buffer/thug/blob/master/docker/Dockerfile -WORKDIR ${PROJECT_PATH}/thug -COPY requirements/thug-requirements.txt ./ -RUN python3 -m venv venv \ - && . venv/bin/activate \ - && pip3 install --no-cache-dir --upgrade pip \ - # this is the python 3.9 version. Once you update the base image you should switch to the right version (cp39) - && wget -q https://github.com/cloudflare/stpyv8/releases/download/v12.7.224.18/stpyv8-12.7.224.18-cp39-cp39-manylinux_2_31_x86_64.whl \ - && pip3 install --no-cache-dir stpyv8-12.7.224.18-cp39-cp39-manylinux_2_31_x86_64.whl \ - && rm stpyv8-12.7.224.18-cp39-cp39-manylinux_2_31_x86_64.whl \ - && mkdir -p /usr/share/stpyv8 \ - && git clone https://github.com/buffer/libemu.git && cd libemu && autoreconf -v -i && ./configure && make install && cd .. 
&& rm -rf libemu && ldconfig \ - && pip3 install --no-cache-dir -r thug-requirements.txt \ - && git clone --depth 1 https://github.com/buffer/thug.git \ - && mkdir -p /etc/thug \ - && cp -R thug/thug/conf/* /etc/thug \ - && rm -rf thug \ - && mkdir -p /tmp/thug/logs \ - && chown -R ${USER}:${USER} /tmp/thug/logs /etc/thug - # prepare fangfrisch installation COPY crontab /etc/cron.d/crontab RUN mkdir -m 0770 -p /var/lib/fangfrisch \ diff --git a/integrations/malware_tools_analyzers/app.py b/integrations/malware_tools_analyzers/app.py index 09b4913f71..c85abbce5f 100644 --- a/integrations/malware_tools_analyzers/app.py +++ b/integrations/malware_tools_analyzers/app.py @@ -130,44 +130,6 @@ def intercept_droidlysis_result(context, future: Future) -> None: shutil.rmtree(dir_loc, ignore_errors=True) -def intercept_thug_result(context, future: Future) -> None: - """ - Thug doesn't output result to standard output but to a file, - using this callback function, - we intercept the future object and update its result attribute - by reading the final analysis result from the saved result file - before it is ready to be consumed. - """ - # 1. get current result object - res = future.result() - # 2. dir from which we will read final analysis result - dir_loc = context.get("read_result_from", None) - if not dir_loc: - res["error"] += ", No specified file to read result from" - if res.get("returncode", -1) == 0: - res["returncode"] = -1 - else: - # 3. read saved result file, if it exists - f_loc = dir_loc + "/analysis/json/analysis.json" - if not os.path.exists(f_loc): - res["error"] += f", result file {f_loc} does not exists." - if res.get("returncode", -1) == 0: - res["returncode"] = -1 - else: - with open(f_loc, "r", encoding="utf-8") as fp: - try: - res["report"] = json.load(fp) - except json.JSONDecodeError: - res["report"] = fp.read() - - # 4. set final result after modifications - future._result = res # skipcq PYL-W0212 - - # 5. 
directory can be removed now - if dir_loc: - shutil.rmtree(dir_loc, ignore_errors=True) - - # with this, we can make http calls to the endpoint: /capa shell2http.register_command(endpoint="capa", command_name="/usr/local/bin/capa -q -j") @@ -210,12 +172,6 @@ def intercept_thug_result(context, future: Future) -> None: command_name="/opt/deploy/qiling/venv/bin/python3 /opt/deploy/qiling/analyze.py", ) -# diec is the command for Detect It Easy -shell2http.register_command( - endpoint="die", - command_name="/opt/deploy/die/die_lin64_portable/base/diec", -) - # mobsfscan is the command for DroidLysis shell2http.register_command( endpoint="mobsf", @@ -234,15 +190,9 @@ def intercept_thug_result(context, future: Future) -> None: endpoint="artifacts", command_name="/opt/deploy/artifacts/venv/bin/python3 /opt/deploy/artifacts/artifacts/artifacts.py", ) + # goresym is the command for GoReSym shell2http.register_command( endpoint="goresym", command_name="/usr/local/bin/goresym", ) - -# with this, we can make http calls to the endpoint: /thug -shell2http.register_command( - endpoint="thug", - command_name="/opt/deploy/thug/venv/bin/thug -qZF", - callback_fn=intercept_thug_result, -) diff --git a/integrations/malware_tools_analyzers/hooks/build b/integrations/malware_tools_analyzers/hooks/build new file mode 100644 index 0000000000..3dc2ddf066 --- /dev/null +++ b/integrations/malware_tools_analyzers/hooks/build @@ -0,0 +1,18 @@ +#!/bin/bash + +echo "display path" +echo "$(pwd)" +echo "display dockerfile path" +echo $DOCKERFILE_PATH +echo "current branch" +echo "$SOURCE_BRANCH" + +version_regex='^v[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$' +if [[ "$SOURCE_BRANCH" == "master" || "$SOURCE_BRANCH" =~ $version_regex ]]; then + echo "The branch is master, proceeding with multi-arch build" + docker buildx create --name multiarch --use + docker buildx build -f "$DOCKERFILE_PATH" -t "$IMAGE_NAME" --platform linux/arm64,linux/amd64 --push . 
+else + echo "The branch is not master, proceeding with classic build" + docker buildx build -f "$DOCKERFILE_PATH" -t "$IMAGE_NAME" --push . +fi \ No newline at end of file diff --git a/integrations/malware_tools_analyzers/requirements/droidlysis-requirements.txt b/integrations/malware_tools_analyzers/requirements/droidlysis-requirements.txt index 9f16f4c24a..b2b6b09381 100644 --- a/integrations/malware_tools_analyzers/requirements/droidlysis-requirements.txt +++ b/integrations/malware_tools_analyzers/requirements/droidlysis-requirements.txt @@ -1 +1,3 @@ +# they do not make releases +# if you update this, you should take into considerations all the other dependencies in the Dockerfile too git+https://github.com/cryptax/droidlysis@c1645a5 \ No newline at end of file diff --git a/integrations/malware_tools_analyzers/requirements/flask-requirements.txt b/integrations/malware_tools_analyzers/requirements/flask-requirements.txt index 8d3b8f9e86..63517d41f0 100644 --- a/integrations/malware_tools_analyzers/requirements/flask-requirements.txt +++ b/integrations/malware_tools_analyzers/requirements/flask-requirements.txt @@ -1,3 +1,6 @@ +# Flask-Shell2HTTP dynamically install the most recent supported version of Flask +# So, if you want reproducible builds, you must explicitly state the flask version you want to install Flask-Shell2HTTP-fork==1.9.2 +flask==3.1.0 gunicorn==23.0.0 fangfrisch==1.9.0 \ No newline at end of file diff --git a/integrations/malware_tools_analyzers/requirements/qiling-requirements.txt b/integrations/malware_tools_analyzers/requirements/qiling-requirements.txt index fbe59de4a9..94ce90b1ce 100644 --- a/integrations/malware_tools_analyzers/requirements/qiling-requirements.txt +++ b/integrations/malware_tools_analyzers/requirements/qiling-requirements.txt @@ -1 +1 @@ -qiling==1.4.5 \ No newline at end of file +qiling==1.4.6 \ No newline at end of file diff --git a/integrations/malware_tools_analyzers/requirements/stringsifter-requirements.txt 
b/integrations/malware_tools_analyzers/requirements/stringsifter-requirements.txt index f425620ee0..f85c13f8c1 100644 --- a/integrations/malware_tools_analyzers/requirements/stringsifter-requirements.txt +++ b/integrations/malware_tools_analyzers/requirements/stringsifter-requirements.txt @@ -1,2 +1 @@ -wheel==0.40.0 stringsifter==3.20230711 \ No newline at end of file diff --git a/integrations/malware_tools_analyzers/requirements/thug-requirements.txt b/integrations/malware_tools_analyzers/requirements/thug-requirements.txt deleted file mode 100644 index a34b5d6a23..0000000000 --- a/integrations/malware_tools_analyzers/requirements/thug-requirements.txt +++ /dev/null @@ -1,6 +0,0 @@ -setuptools==70.0.0 -pytesseract==0.3.10 -pygraphviz==1.10 -# CAREFUL! This is strictly tied to STpy version and Python version -# DO NOT UPGRADE THIS WITHOUT PROPER MANUAL TESTING -thug==4.9 \ No newline at end of file diff --git a/integrations/nuclei_analyzer/Dockerfile b/integrations/nuclei_analyzer/Dockerfile new file mode 100644 index 0000000000..6ca632b6fc --- /dev/null +++ b/integrations/nuclei_analyzer/Dockerfile @@ -0,0 +1,44 @@ +FROM projectdiscovery/nuclei:v3.3.8 + +ENV LOG_PATH=/var/log/intel_owl/nuclei_analyzer +ENV USER=nuclei-user +ENV PROJECT_PATH=/app + +# Create non-root user +RUN adduser -D -h /home/${USER} ${USER} + +# Install required packages using apk and clean cache in the same layer +RUN apk add --no-cache python3=3.11.11-r0 py3-pip \ + && rm -rf /var/cache/apk/* \ + && pip3 install --no-cache-dir --upgrade pip + +# Create working directory and set ownership +WORKDIR /app + +# Copy and install requirements first (better layer caching) +COPY requirements.txt . 
+RUN pip3 install --no-cache-dir -r requirements.txt \ + && rm -rf ~/.cache/pip/* + +# Create log directory with proper permissions +RUN mkdir -p ${LOG_PATH} \ + && touch ${LOG_PATH}/gunicorn_access.log ${LOG_PATH}/gunicorn_errors.log \ + && chown -R ${USER}:${USER} ${LOG_PATH} \ + && chmod 755 ${LOG_PATH} \ + && chmod 666 ${LOG_PATH}/gunicorn_access.log \ + && chmod 666 ${LOG_PATH}/gunicorn_errors.log +# Copy application files +COPY app.py . +COPY entrypoint.sh /entrypoint.sh + +# Set proper permissions +RUN chown -R ${USER}:${USER} /app \ + && chmod +x /entrypoint.sh + +# Expose the API port +EXPOSE 4008 + +HEALTHCHECK --interval=45s --timeout=10s --retries=3 \ + CMD curl -f http://localhost:4008/health || exit 1 + +ENTRYPOINT ["/entrypoint.sh"] \ No newline at end of file diff --git a/integrations/nuclei_analyzer/app.py b/integrations/nuclei_analyzer/app.py new file mode 100644 index 0000000000..5d97c92660 --- /dev/null +++ b/integrations/nuclei_analyzer/app.py @@ -0,0 +1,82 @@ +import json +import logging +import os + +from flask import Flask +from flask_executor import Executor +from flask_shell2http import Shell2HTTP + +# Logger configuration +LOG_NAME = "nuclei_scanner" +logger = logging.getLogger("flask_shell2http") + +# Create formatter +formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s") + +# Set log level from environment variable or default to INFO +log_level = os.getenv("LOG_LEVEL", logging.INFO) +log_path = os.getenv("LOG_PATH", f"/var/log/intel_owl/{LOG_NAME}") + +# Create file handlers for both general logs and errors +fh = logging.FileHandler(f"{log_path}/{LOG_NAME}.log") +fh.setFormatter(formatter) +fh.setLevel(log_level) + +fh_err = logging.FileHandler(f"{log_path}/{LOG_NAME}_errors.log") +fh_err.setFormatter(formatter) +fh_err.setLevel(logging.ERROR) + +# Add handlers to logger +logger.addHandler(fh) +logger.addHandler(fh_err) +logger.setLevel(log_level) + +# Flask application instance with secret key +app = 
Flask(__name__) +app.config["SECRET_KEY"] = os.getenv("SECRET_KEY", os.urandom(24).hex()) + +# Initialize the Executor for background task processing +executor = Executor(app) + +# Initialize the Shell2HTTP for exposing shell commands as HTTP endpoints +shell2http = Shell2HTTP(app=app, executor=executor) + + +@app.route("/health", methods=["GET"]) +def health_check(): + return {"status": "healthy"}, 200 + + +def my_callback_fn(context, future): + """ + Callback function to handle Nuclei scan results + """ + try: + result = future.result() + report = result["report"] + # The report is a string with multiple JSON objects separated by newlines + json_objects = [] + for line in report.strip().split("\n"): + try: + json_objects.append(json.loads(line)) + except json.JSONDecodeError: + logger.warning(f"Skipping non-JSON line: {line}") + result["report"] = {"data": json_objects} + logger.info(f"Nuclei scan completed for context: {context}") + logger.debug(f"Scan result: {result}") + except Exception as e: + logger.error(f"Error in callback function: {str(e)}", exc_info=True) + raise + + +# Register the 'nuclei' command +shell2http.register_command( + endpoint="run-nuclei", + command_name="nuclei -j -ud /opt/nuclei-api/nuclei-templates -u", + callback_fn=my_callback_fn, +) + + +if __name__ == "__main__": + logger.info("Starting Nuclei scanner API server") + app.run(host="0.0.0.0", port=4008) diff --git a/integrations/nuclei_analyzer/compose-tests.yml b/integrations/nuclei_analyzer/compose-tests.yml new file mode 100644 index 0000000000..da343a9c54 --- /dev/null +++ b/integrations/nuclei_analyzer/compose-tests.yml @@ -0,0 +1,6 @@ +services: + nuclei_analyzer: + build: + context: ../integrations/nuclei_analyzer + dockerfile: Dockerfile + image: intelowlproject/nuclei_analyzer:test \ No newline at end of file diff --git a/integrations/nuclei_analyzer/compose.yml b/integrations/nuclei_analyzer/compose.yml new file mode 100644 index 0000000000..ee8ef25407 --- /dev/null +++ 
b/integrations/nuclei_analyzer/compose.yml @@ -0,0 +1,15 @@ +# All additional integrations should be added following this format only. + +services: + nuclei_analyzer: + image: intelowlproject/nuclei_analyzer:${REACT_APP_INTELOWL_VERSION} + container_name: nuclei_analyzer + restart: unless-stopped + expose: + - "4008" + env_file: + - env_file_integrations + volumes: + - generic_logs:/var/log/intel_owl + depends_on: + - uwsgi diff --git a/integrations/nuclei_analyzer/entrypoint.sh b/integrations/nuclei_analyzer/entrypoint.sh new file mode 100755 index 0000000000..74f2c0e39f --- /dev/null +++ b/integrations/nuclei_analyzer/entrypoint.sh @@ -0,0 +1,21 @@ +#!/bin/sh +mkdir -p ${LOG_PATH} +touch ${LOG_PATH}/gunicorn_access.log ${LOG_PATH}/gunicorn_errors.log +chown -R ${USER}:${USER} ${LOG_PATH} + +TEMPLATES_DIR="/opt/nuclei-api/nuclei-templates" +echo "Updating Nuclei templates..." +nuclei -update-template-dir $TEMPLATES_DIR -update-templates +sleep 30 +echo "Verifying Nuclei templates..." +if [ ! -d "$TEMPLATES_DIR" ] || [ -z "$(ls -A $TEMPLATES_DIR)" ]; then + echo "Error: Nuclei templates not found or directory is empty. Please check your internet connection. Exiting..." + exit 1 +else + echo "Nuclei templates successfully updated." +fi +echo "Templates downloaded successfully. Starting Flask API..." 
+exec gunicorn 'app:app' \ + --bind '0.0.0.0:4008' \ + --access-logfile ${LOG_PATH}/gunicorn_access.log \ + --error-logfile ${LOG_PATH}/gunicorn_errors.log diff --git a/integrations/nuclei_analyzer/hooks/build b/integrations/nuclei_analyzer/hooks/build new file mode 100644 index 0000000000..3dc2ddf066 --- /dev/null +++ b/integrations/nuclei_analyzer/hooks/build @@ -0,0 +1,18 @@ +#!/bin/bash + +echo "display path" +echo "$(pwd)" +echo "display dockerfile path" +echo $DOCKERFILE_PATH +echo "current branch" +echo "$SOURCE_BRANCH" + +version_regex='^v[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$' +if [[ "$SOURCE_BRANCH" == "master" || "$SOURCE_BRANCH" =~ $version_regex ]]; then + echo "The branch is master, proceeding with multi-arch build" + docker buildx create --name multiarch --use + docker buildx build -f "$DOCKERFILE_PATH" -t "$IMAGE_NAME" --platform linux/arm64,linux/amd64 --push . +else + echo "The branch is not master, proceeding with classic build" + docker buildx build -f "$DOCKERFILE_PATH" -t "$IMAGE_NAME" --push . 
+fi \ No newline at end of file diff --git a/integrations/nuclei_analyzer/requirements.txt b/integrations/nuclei_analyzer/requirements.txt new file mode 100644 index 0000000000..3b5bf04267 --- /dev/null +++ b/integrations/nuclei_analyzer/requirements.txt @@ -0,0 +1,5 @@ +# Flask-Shell2HTTP dynamically install the most recent supported version of Flask +# So, if you want reproducible builds, you must explicitly state the flask version you want to install +Flask-Shell2HTTP-fork==1.9.2 +flask==3.1.0 +gunicorn==23.0.0 diff --git a/integrations/pcap_analyzers/Dockerfile b/integrations/pcap_analyzers/Dockerfile index 9c86cbbf93..b438a97fc4 100644 --- a/integrations/pcap_analyzers/Dockerfile +++ b/integrations/pcap_analyzers/Dockerfile @@ -1,9 +1,9 @@ # https://github.com/jasonish/docker-suricata/tree/master/7.0 FROM jasonish/suricata:7.0 -ENV PROJECT_PATH /opt/deploy -ENV LOG_PATH /var/log/intel_owl/pcap_analyzers -ENV USER pcap_analyzers-user +ENV PROJECT_PATH=/opt/deploy +ENV LOG_PATH=/var/log/intel_owl/pcap_analyzers +ENV USER=pcap_analyzers-user RUN dnf -y install python3-pip && dnf clean all && useradd -ms /bin/bash ${USER} # Build Flask REST API diff --git a/integrations/pcap_analyzers/hooks/build b/integrations/pcap_analyzers/hooks/build new file mode 100644 index 0000000000..3dc2ddf066 --- /dev/null +++ b/integrations/pcap_analyzers/hooks/build @@ -0,0 +1,18 @@ +#!/bin/bash + +echo "display path" +echo "$(pwd)" +echo "display dockerfile path" +echo $DOCKERFILE_PATH +echo "current branch" +echo "$SOURCE_BRANCH" + +version_regex='^v[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$' +if [[ "$SOURCE_BRANCH" == "master" || "$SOURCE_BRANCH" =~ $version_regex ]]; then + echo "The branch is master, proceeding with multi-arch build" + docker buildx create --name multiarch --use + docker buildx build -f "$DOCKERFILE_PATH" -t "$IMAGE_NAME" --platform linux/arm64,linux/amd64 --push . 
+else + echo "The branch is not master, proceeding with classic build" + docker buildx build -f "$DOCKERFILE_PATH" -t "$IMAGE_NAME" --push . +fi \ No newline at end of file diff --git a/integrations/pcap_analyzers/requirements.txt b/integrations/pcap_analyzers/requirements.txt index 06a702d452..e2c39890bd 100644 --- a/integrations/pcap_analyzers/requirements.txt +++ b/integrations/pcap_analyzers/requirements.txt @@ -1,3 +1,6 @@ +# Flask-Shell2HTTP dynamically install the most recent supported version of Flask +# So, if you want reproducible builds, you must explicitly state the flask version you want to install Flask-Shell2HTTP-fork==1.9.2 +flask==3.1.0 gunicorn==23.0.0 PyYAML==6.0 \ No newline at end of file diff --git a/integrations/phishing_analyzers/Dockerfile b/integrations/phishing_analyzers/Dockerfile index 7606874f2b..0f442724ff 100644 --- a/integrations/phishing_analyzers/Dockerfile +++ b/integrations/phishing_analyzers/Dockerfile @@ -12,8 +12,6 @@ RUN useradd -ms /bin/bash ${USER} RUN DEBIAN_FRONTEND=noninteractive apt-get update -qq \ && apt-get install -y --no-install-recommends \ libvulkan1 libu2f-udev fonts-liberation chromium sudo \ - && wget --progress=dot:giga https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb \ - && dpkg -i google-chrome-stable_current_amd64.deb \ && pip3 install --no-cache-dir --upgrade pip \ # Cleanup && apt-get remove --purge -y gcc \ diff --git a/integrations/phishing_analyzers/analyzers/driver_wrapper.py b/integrations/phishing_analyzers/analyzers/driver_wrapper.py index fc198232ce..afedc3553c 100644 --- a/integrations/phishing_analyzers/analyzers/driver_wrapper.py +++ b/integrations/phishing_analyzers/analyzers/driver_wrapper.py @@ -5,12 +5,14 @@ from typing import Iterator from selenium.common import WebDriverException +from selenium.common.exceptions import TimeoutException +from selenium.webdriver.chromium.options import ChromiumOptions from selenium.webdriver.common.by import By from 
selenium.webdriver.support import expected_conditions as EC from selenium.webdriver.support.wait import WebDriverWait from seleniumwire.request import Request from seleniumwire.thirdparty.mitmproxy.exceptions import ServerException -from seleniumwire.webdriver import ChromeOptions, Remote +from seleniumwire.webdriver import Remote LOG_NAME = "driver_wrapper" @@ -72,7 +74,7 @@ def __init__( ) def _pick_free_port_from_pool( - self, sw_options: {}, options: ChromeOptions + self, sw_options: {}, options: ChromiumOptions ) -> Remote: tries: int = 0 while tries < self.port_pool_size: @@ -105,7 +107,7 @@ def _init_driver( self, window_width: int, window_height: int, user_agent: str ) -> Remote: logger.info(f"Adding proxy with option: {self.proxy}") - logger.info("Creating Chrome driver...") + logger.info("Creating Chromium driver...") sw_options: {} = { "auto_config": False, # Ensure this is set to False "enable_har": True, @@ -117,7 +119,7 @@ def _init_driver( if self.proxy: sw_options["proxy"] = {"http": self.proxy, "https": self.proxy} - options = ChromeOptions() + options = ChromiumOptions() # no_sandbox=True is a bad practice but it's almost the only way # to run chromium-based browsers in docker. browser is running # as unprivileged user and it's in a container: trade-off @@ -152,13 +154,18 @@ def navigate(self, url: str = "", timeout_wait_page: int = 0): self.last_url = url logger.info(f"{self._driver.session_id}: Navigating to {url=}") self._driver.get(url) - # dinamically wait for page to load its content with a fallback - # of `timeout_wait_page` seconds. - # waiting to see if any visible input tag appears + # dinamically wait for page to load its content with a fallback of + # `timeout_wait_page` seconds. 
waiting for any visible input tag to appear if timeout_wait_page: - WebDriverWait(self._driver, timeout=timeout_wait_page).until( - EC.visibility_of_any_elements_located((By.TAG_NAME, "input")) - ) + try: + WebDriverWait(self._driver, timeout=timeout_wait_page).until( + EC.visibility_of_any_elements_located((By.TAG_NAME, "input")) + ) + except TimeoutException: + logger.info( + "Timeout for input tag to appear exceeded! " + "This could mean that the page has no input tag to compile!" + ) @driver_exception_handler def get_page_source(self) -> str: diff --git a/integrations/phishing_analyzers/compose.yml b/integrations/phishing_analyzers/compose.yml index d6245ff4b9..2d5afa29e4 100644 --- a/integrations/phishing_analyzers/compose.yml +++ b/integrations/phishing_analyzers/compose.yml @@ -15,10 +15,10 @@ services: depends_on: - uwsgi - chrome-webdriver: + chromium-webdriver: # tagging convention for chrome webdriver # https://github.com/SeleniumHQ/docker-selenium/wiki/Tagging-Convention - image: selenium/node-chrome:130.0.6723.91-chromedriver-130.0.6723.91-grid-4.26.0-20241101 + image: selenium/node-chromium:132.0.6834.159-chromedriver-132.0.6834.159-grid-4.28.1-20250202 shm_size: 2gb # https://github.com/SeleniumHQ/docker-selenium?tab=readme-ov-file#--shm-size2g depends_on: - selenium-hub @@ -37,7 +37,7 @@ services: - SE_BROWSER_LEFTOVERS_PROCESSES_SECS=86400 selenium-hub: - image: selenium/hub:4.26.0 + image: selenium/hub:4.28.1 container_name: selenium-hub environment: - SE_ENABLE_TRACING=false diff --git a/integrations/phishing_analyzers/hooks/build b/integrations/phishing_analyzers/hooks/build new file mode 100644 index 0000000000..3dc2ddf066 --- /dev/null +++ b/integrations/phishing_analyzers/hooks/build @@ -0,0 +1,18 @@ +#!/bin/bash + +echo "display path" +echo "$(pwd)" +echo "display dockerfile path" +echo $DOCKERFILE_PATH +echo "current branch" +echo "$SOURCE_BRANCH" + +version_regex='^v[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$' +if [[ "$SOURCE_BRANCH" == "master" 
|| "$SOURCE_BRANCH" =~ $version_regex ]]; then + echo "The branch is master, proceeding with multi-arch build" + docker buildx create --name multiarch --use + docker buildx build -f "$DOCKERFILE_PATH" -t "$IMAGE_NAME" --platform linux/arm64,linux/amd64 --push . +else + echo "The branch is not master, proceeding with classic build" + docker buildx build -f "$DOCKERFILE_PATH" -t "$IMAGE_NAME" --push . +fi \ No newline at end of file diff --git a/integrations/phishing_analyzers/requirements.txt b/integrations/phishing_analyzers/requirements.txt index 6e7b9bfe65..bd2c7ecf1e 100644 --- a/integrations/phishing_analyzers/requirements.txt +++ b/integrations/phishing_analyzers/requirements.txt @@ -1,4 +1,8 @@ +# Flask-Shell2HTTP dynamically install the most recent supported version of Flask +# So, if you want reproducible builds, you must explicitly state the flask version you want to install Flask-Shell2HTTP-fork==1.9.2 +# Flask most recent versions require most recent versions of blinker +flask==2.3.3 gunicorn==23.0.0 selenium==4.25.0 selenium-wire==5.1.0 diff --git a/integrations/thug/Dockerfile b/integrations/thug/Dockerfile new file mode 100644 index 0000000000..d0beb233af --- /dev/null +++ b/integrations/thug/Dockerfile @@ -0,0 +1,21 @@ +# This base image is the one currently (02/2025) updated by the maintainer +# but it does not support ARM +# Plus, v6.11 is bugged, see https://github.com/buffer/thug/issues/397 +FROM thughoneyclient/thug:v6.11 + +USER root +ENV PROJECT_PATH=/opt/deploy +ENV LOG_PATH=/var/log/intel_owl/thug + +# 2. Build Flask REST API +WORKDIR ${PROJECT_PATH}/flask +COPY app.py requirements.txt entrypoint.sh ./ + +RUN pip3 install -r requirements.txt --no-cache-dir \ + && mkdir -p ${PROJECT_PATH}/thug \ + && chown -R ${USER}:${USER} . 
${PROJECT_PATH}/thug \ + && chmod +x entrypoint.sh + +# Serve Flask application using gunicorn +EXPOSE 4002 +ENTRYPOINT ["./entrypoint.sh"] diff --git a/integrations/thug/app.py b/integrations/thug/app.py new file mode 100644 index 0000000000..927d5dd285 --- /dev/null +++ b/integrations/thug/app.py @@ -0,0 +1,88 @@ +# This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl +# See the file 'LICENSE' for copying permission. + +import json +import logging +import os + +# system imports +import secrets +import shutil + +# web imports +from flask import Flask +from flask_executor import Executor +from flask_executor.futures import Future +from flask_shell2http import Shell2HTTP + +LOG_NAME = "thug" + +# get flask-shell2http logger instance +logger = logging.getLogger("flask_shell2http") +# logger config +formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s") +log_level = os.getenv("LOG_LEVEL", logging.INFO) +log_path = os.getenv("LOG_PATH", f"/var/log/intel_owl/{LOG_NAME}") +# create new file handlers, files are created if doesn't already exists +fh = logging.FileHandler(f"{log_path}/{LOG_NAME}.log") +fh.setFormatter(formatter) +fh.setLevel(log_level) +fh_err = logging.FileHandler(f"{log_path}/{LOG_NAME}_errors.log") +fh_err.setFormatter(formatter) +fh_err.setLevel(logging.ERROR) +# add the handlers to the logger +logger.addHandler(fh) +logger.addHandler(fh_err) +logger.setLevel(log_level) + +# Globals +app = Flask(__name__) +app.config["SECRET_KEY"] = secrets.token_hex(16) +executor = Executor(app) +shell2http = Shell2HTTP(app, executor) + + +def intercept_thug_result(context, future: Future) -> None: + """ + Thug doesn't output result to standard output but to a file, + using this callback function, + we intercept the future object and update its result attribute + by reading the final analysis result from the saved result file + before it is ready to be consumed. + """ + # 1. 
get current result object + res = future.result() + # 2. dir from which we will read final analysis result + dir_loc = context.get("read_result_from", None) + if not dir_loc: + res["error"] += ", No specified file to read result from" + if res.get("returncode", -1) == 0: + res["returncode"] = -1 + else: + # 3. read saved result file, if it exists + f_loc = dir_loc + "/analysis/json/analysis.json" + if not os.path.exists(f_loc): + res["error"] += f", result file {f_loc} does not exists." + if res.get("returncode", -1) == 0: + res["returncode"] = -1 + else: + with open(f_loc, "r", encoding="utf-8") as fp: + try: + res["report"] = json.load(fp) + except json.JSONDecodeError: + res["report"] = fp.read() + + # 4. set final result after modifications + future._result = res # skipcq PYL-W0212 + + # 5. directory can be removed now + if dir_loc: + shutil.rmtree(dir_loc, ignore_errors=True) + + +# with this, we can make http calls to the endpoint: /thug +shell2http.register_command( + endpoint="thug", + command_name="/usr/local/bin/thug -qZF", + callback_fn=intercept_thug_result, +) diff --git a/integrations/thug/compose-tests.yml b/integrations/thug/compose-tests.yml new file mode 100644 index 0000000000..8a984e20c4 --- /dev/null +++ b/integrations/thug/compose-tests.yml @@ -0,0 +1,6 @@ +services: + thug: + build: + context: ../integrations/thug + dockerfile: Dockerfile + image: intelowlproject/intelowl_thug:test diff --git a/integrations/thug/compose.yml b/integrations/thug/compose.yml new file mode 100644 index 0000000000..75b87b6a5c --- /dev/null +++ b/integrations/thug/compose.yml @@ -0,0 +1,14 @@ +services: + thug: + image: intelowlproject/intelowl_thug:${REACT_APP_INTELOWL_VERSION} + container_name: intelowl_thug + restart: unless-stopped + expose: + - "4002" + env_file: + - env_file_integrations + volumes: + - generic_logs:/var/log/intel_owl + depends_on: + - uwsgi + diff --git a/integrations/thug/entrypoint.sh b/integrations/thug/entrypoint.sh new file mode 100755 
index 0000000000..b95e3aeaf6 --- /dev/null +++ b/integrations/thug/entrypoint.sh @@ -0,0 +1,14 @@ +#!/bin/bash +mkdir -p ${LOG_PATH} +touch ${LOG_PATH}/gunicorn_access.log ${LOG_PATH}/gunicorn_errors.log ${LOG_PATH}/thug.log ${LOG_PATH}/thug_errors.log +chown -R ${USER}:${USER} ${LOG_PATH} +# change user +su thug -s /bin/bash +echo "running gunicorn" +# start flask server +/usr/local/bin/gunicorn 'app:app' \ + --bind '0.0.0.0:4002' \ + --user thug \ + --log-level ${LOG_LEVEL} \ + --access-logfile ${LOG_PATH}/gunicorn_access.log \ + --error-logfile ${LOG_PATH}/gunicorn_errors.log diff --git a/integrations/thug/requirements.txt b/integrations/thug/requirements.txt new file mode 100644 index 0000000000..9bdceaf549 --- /dev/null +++ b/integrations/thug/requirements.txt @@ -0,0 +1,5 @@ +# Flask-Shell2HTTP dynamically install the most recent supported version of Flask +# So, if you want reproducible builds, you must explicitly state the flask version you want to install +Flask-Shell2HTTP-fork==1.9.2 +flask==3.1.0 +gunicorn==23.0.0 \ No newline at end of file diff --git a/integrations/tor_analyzers/Dockerfile b/integrations/tor_analyzers/Dockerfile index dc1c6f800f..3680c66db8 100644 --- a/integrations/tor_analyzers/Dockerfile +++ b/integrations/tor_analyzers/Dockerfile @@ -1,8 +1,8 @@ -FROM python:3.8-slim +FROM python:3.12-slim -ENV PROJECT_PATH /opt/deploy -ENV LOG_PATH /var/log/intel_owl/tor_analyzers -ENV USER tor-user +ENV PROJECT_PATH=/opt/deploy +ENV LOG_PATH=/var/log/intel_owl/tor_analyzers +ENV USER=tor-user # Add a new low-privileged user RUN useradd -r -s /sbin/nologin ${USER} diff --git a/integrations/tor_analyzers/hooks/build b/integrations/tor_analyzers/hooks/build new file mode 100644 index 0000000000..3dc2ddf066 --- /dev/null +++ b/integrations/tor_analyzers/hooks/build @@ -0,0 +1,18 @@ +#!/bin/bash + +echo "display path" +echo "$(pwd)" +echo "display dockerfile path" +echo $DOCKERFILE_PATH +echo "current branch" +echo "$SOURCE_BRANCH" + 
+version_regex='^v[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$' +if [[ "$SOURCE_BRANCH" == "master" || "$SOURCE_BRANCH" =~ $version_regex ]]; then + echo "The branch is master, proceeding with multi-arch build" + docker buildx create --name multiarch --use + docker buildx build -f "$DOCKERFILE_PATH" -t "$IMAGE_NAME" --platform linux/arm64,linux/amd64 --push . +else + echo "The branch is not master, proceeding with classic build" + docker buildx build -f "$DOCKERFILE_PATH" -t "$IMAGE_NAME" --push . +fi \ No newline at end of file diff --git a/integrations/tor_analyzers/requirements.txt b/integrations/tor_analyzers/requirements.txt index fc3155a797..9bdceaf549 100644 --- a/integrations/tor_analyzers/requirements.txt +++ b/integrations/tor_analyzers/requirements.txt @@ -1,2 +1,5 @@ +# Flask-Shell2HTTP dynamically install the most recent supported version of Flask +# So, if you want reproducible builds, you must explicitly state the flask version you want to install Flask-Shell2HTTP-fork==1.9.2 +flask==3.1.0 gunicorn==23.0.0 \ No newline at end of file diff --git a/intel_owl/settings/_util.py b/intel_owl/settings/_util.py index d863e7be5b..747d9bcade 100644 --- a/intel_owl/settings/_util.py +++ b/intel_owl/settings/_util.py @@ -29,3 +29,14 @@ def set_permissions(directory: Path, force_create: bool = False): os.chown(directory, uid, gid) for path in directory.rglob("*"): os.chown(path, uid, gid) + + +def get_environment() -> str: + from intel_owl.settings import STAGE_PRODUCTION, STAGE_STAGING + + if STAGE_PRODUCTION: + return "prod" + elif STAGE_STAGING: + return "stag" + else: + return "test" diff --git a/intel_owl/tasks.py b/intel_owl/tasks.py index 2ca9d9b3fd..4947772b20 100644 --- a/intel_owl/tasks.py +++ b/intel_owl/tasks.py @@ -25,6 +25,7 @@ from api_app.choices import ReportStatus, Status from intel_owl import secrets from intel_owl.celery import app, get_queue_name +from intel_owl.settings._util import get_environment logger = logging.getLogger(__name__) @@ -441,6 
+442,7 @@ def _convert_report_to_elastic_document( "_op_type": "index", "_index": ( "plugin-report-" + f"{get_environment()}-" f"{inflection.underscore(_class.__name__).replace('_', '-')}-" f"{now().date()}" ), diff --git a/requirements/hardcoded-requirements.txt b/requirements/hardcoded-requirements.txt new file mode 100644 index 0000000000..0e7ae68042 --- /dev/null +++ b/requirements/hardcoded-requirements.txt @@ -0,0 +1,15 @@ +# This file is used exclusively to trigger dependabot PR. +# The update of the dependencies here has no actual effect. +# For each updated dependency here you have to update its relative version hardcoded in the code. +# So please add a comment for each dependency explaining where the related code must be updated. + +# docker/Dockerfile +pycti==6.5.1 +# integrations/malware_tools_analyzers/Dockerfile +flare-capa==9.0.0 +flare-floss==3.1.1 + +# other unmanaged versions +# droydlys - they make no new versions, we pin the commit +# goresym - they create releases in the repo +# boxjs - we can get that info from npm packages \ No newline at end of file diff --git a/requirements/pre-requirements.txt b/requirements/pre-requirements.txt deleted file mode 100644 index 3ce13e1d42..0000000000 --- a/requirements/pre-requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -python-dotenv>=0.21.0 -GitPython>=3.1.30 diff --git a/requirements/project-requirements.txt b/requirements/project-requirements.txt index 787ddb56b4..cbd2c61f10 100644 --- a/requirements/project-requirements.txt +++ b/requirements/project-requirements.txt @@ -41,7 +41,7 @@ elasticsearch-dsl==8.17.0 GitPython==3.1.41 checkdmarc==5.7.9 dnspython==2.7.0 -dnstwist[full]==20240812 +dnstwist[full]==20250130 google>=3.0.0 google-cloud-webrisk==1.15.0 intezer-sdk==1.21 @@ -87,6 +87,8 @@ dotnetfile==0.2.4 docxpy==0.8.5 pylnk3==0.4.2 androguard==3.4.0a1 # version >=4.x of androguard raises a dependency conflict with quark-engine==25.1.1 +wad==0.4.6 +die-python==0.2.0 # this is required because 
XLMMacroDeobfuscator does not pin the following packages pyxlsb2==0.0.8 @@ -100,4 +102,4 @@ pyzipper==0.3.6 dateparser==1.2.0 # phishing form compiler module lxml==5.3.0 -Faker==30.8.0 \ No newline at end of file +Faker==35.2.0 \ No newline at end of file diff --git a/start b/start index 1f257e40cb..c4dbf130af 100755 --- a/start +++ b/start @@ -31,6 +31,8 @@ print_help () { echo " file." echo " --malware_tools_analyzers Uses the integrations/malware_tools_analyzers/" echo " compose.yml file." + echo " --thug Uses the integrations/thug/compose.yml" + echo " file." echo " --cyberchef Uses the integrations/cyberchef/compose.yml" echo " file." echo " --pcap_analyzers Uses the integrations/pcap_analyzers/compose.yml" @@ -99,7 +101,7 @@ check_parameters "$@" && shift 2 load_env "docker/.env" current_version=${REACT_APP_INTELOWL_VERSION/"v"/""} -docker_analyzers=("pcap_analyzers" "tor_analyzers" "malware_tools_analyzers" "cyberchef" "phoneinfoga" "phishing_analyzers") +docker_analyzers=("pcap_analyzers" "tor_analyzers" "malware_tools_analyzers" "thug" "cyberchef" "phoneinfoga" "phishing_analyzers" "nuclei_analyzer") for value in "${docker_analyzers[@]}"; do @@ -147,10 +149,18 @@ while [[ $# -gt 0 ]]; do analyzers["tor_analyzers"]=true shift 1 ;; + --nuclei_analyzer) + analyzers["nuclei_analyzer"]=true + shift 1 + ;; --malware_tools_analyzers) analyzers["malware_tools_analyzers"]=true shift 1 ;; + --thug) + analyzers["thug"]=true + shift 1 + ;; --cyberchef) analyzers["cyberchef"]=true shift 1 diff --git a/tests/api_app/playbooks_manager/test_queryset.py b/tests/api_app/playbooks_manager/test_queryset.py index 5896ee41d8..b21914ab72 100644 --- a/tests/api_app/playbooks_manager/test_queryset.py +++ b/tests/api_app/playbooks_manager/test_queryset.py @@ -4,8 +4,10 @@ from api_app.models import Job from api_app.playbooks_manager.models import PlaybookConfig from api_app.playbooks_manager.queryset import PlaybookConfigQuerySet +from authentication.models import UserProfile from 
certego_saas.apps.organization.membership import Membership from certego_saas.apps.organization.organization import Organization +from certego_saas.apps.user.models import User from tests import CustomTestCase @@ -69,11 +71,28 @@ def test__subquery_other(self): self.assertEqual(2, pc.weight) def test_ordered_for_user(self): - PlaybookConfig.objects.create(name="second", type=["ip"], description="test") + """Check user playbooks are in the correct order and the robot jobs are excluded""" + + try: + robot = User.objects.get(is_superuser=False, username="robot") + except User.DoesNotExist: + robot = User.objects.create( + username="robot", email="robot@intelowl.com", password="test" + ) + robot_profile = UserProfile.objects.get(user=robot) + robot_profile.is_robot = True + robot_profile.save() + + org = Organization.objects.create(name="test_org") + Membership.objects.create(user=self.user, organization=org, is_owner=True) + Membership.objects.create(user=robot, organization=org) + + pc2 = PlaybookConfig.objects.create( + name="second", type=["ip"], description="test" + ) pc3 = PlaybookConfig.objects.create( name="third", type=["ip"], description="test" ) - pc4 = PlaybookConfig.objects.create( name="fourth", type=["ip"], description="test" ) @@ -94,7 +113,6 @@ def test_ordered_for_user(self): playbook_to_execute=self.pc, finished_analysis_time=now(), ) - Job.objects.create( user=self.user, observable_name="test3.com", @@ -111,6 +129,49 @@ def test_ordered_for_user(self): playbook_to_execute=pc4, finished_analysis_time=now(), ) + # robot jobs + Job.objects.create( + user=robot, + observable_name="test_robot.com", + observable_classification="domain", + status="reported_without_fails", + playbook_to_execute=pc2, + finished_analysis_time=now(), + ) + Job.objects.create( + user=robot, + observable_name="test_robot.com", + observable_classification="domain", + status="reported_without_fails", + playbook_to_execute=pc2, + finished_analysis_time=now(), + ) + 
Job.objects.create( + user=robot, + observable_name="test_robot.com", + observable_classification="domain", + status="reported_without_fails", + playbook_to_execute=pc2, + finished_analysis_time=now(), + ) + + Job.objects.create( + user=robot, + observable_name="test_robot.com", + observable_classification="domain", + status="reported_without_fails", + playbook_to_execute=pc2, + finished_analysis_time=now(), + ) + Job.objects.create( + user=robot, + observable_name="test_robot.com", + observable_classification="domain", + status="reported_without_fails", + playbook_to_execute=pc2, + finished_analysis_time=now(), + ) + pcs = ( PlaybookConfig.objects.ordered_for_user(self.user) .filter(description="test") diff --git a/tests/api_app/test_api.py b/tests/api_app/test_api.py index e3cfcc77b2..cbebc5612c 100644 --- a/tests/api_app/test_api.py +++ b/tests/api_app/test_api.py @@ -12,6 +12,7 @@ from api_app import models from api_app.analyzers_manager.models import AnalyzerConfig +from api_app.connectors_manager.models import ConnectorConfig from api_app.playbooks_manager.models import PlaybookConfig from .. 
import CustomViewSetTestCase @@ -288,6 +289,50 @@ def test_analyze_multiple_observables(self): msg=msg, ) + def test_observable_no_analyzers_only_connector(self): + models.PluginConfig.objects.create( + value="test subject", + parameter=models.Parameter.objects.get( + name="subject", + python_module=models.PythonModule.objects.get( + module="email_sender.EmailSender" + ), + ), + connector_config=ConnectorConfig.objects.get(name="EmailSender"), + ) + models.PluginConfig.objects.create( + value="test body", + parameter=models.Parameter.objects.get( + name="body", + python_module=models.PythonModule.objects.get( + module="email_sender.EmailSender" + ), + ), + connector_config=ConnectorConfig.objects.get(name="EmailSender"), + ) + + data = { + "observables": [ + ["ip", "8.8.8.8"], + ], + "connectors_requested": ["EmailSender"], + "tlp": "CLEAR", + } + response = self.client.post( + "/api/analyze_multiple_observables", data, format="json" + ) + contents = response.json() + msg = (response.status_code, contents) + self.assertEqual(response.status_code, 200, msg=msg) + + content = contents["results"][0] + + job_id = int(content["job_id"]) + job = models.Job.objects.get(pk=job_id) + self.assertEqual(data["observables"][0][1], job.observable_name, msg=msg) + self.assertEqual(job.analyzers_requested.count(), 0) + self.assertEqual(job.pivots_to_execute.count(), 0) + def test_download_sample_200(self): self.assertEqual(models.Job.objects.count(), 0) filename = "file.exe" diff --git a/tests/api_app/test_views.py b/tests/api_app/test_views.py index c978fa8809..acb24f0c3f 100644 --- a/tests/api_app/test_views.py +++ b/tests/api_app/test_views.py @@ -336,17 +336,43 @@ def test_agg_top_playbook_200(self): ) def test_agg_top_user_200(self): + u = User.objects.create( + username="test ;space@intelowl.org", + email="test ;space@intelowl.org", + is_superuser=False, + ) + with patch( + "django.utils.timezone.now", + return_value=datetime.datetime(2024, 11, 28, 
tzinfo=datetime.timezone.utc), + ): + + job, _ = Job.objects.get_or_create( + **{ + "user": u, + "is_sample": False, + "observable_name": "1.2.3.4", + "observable_classification": "ip", + "playbook_to_execute": PlaybookConfig.objects.get(name="Dns"), + "tlp": Job.TLP.CLEAR.value, + } + ) resp = self.client.get(self.agg_top_user) self.assertEqual(resp.status_code, 200) self.assertEqual( resp.json(), { - "values": ["superuser@intelowl.org"], + "values": ["superuser@intelowl.org", "test ;space@intelowl.org"], "aggregation": [ - {"date": "2024-11-28T00:00:00Z", "superuser@intelowl.org": 2} + { + "date": "2024-11-28T00:00:00Z", + "superuser@intelowl.org": 2, + "testspace@intelowl.org": 1, + }, ], }, ) + job.delete() + u.delete() def test_agg_top_tlp_200(self): resp = self.client.get(self.agg_top_tlp) @@ -595,7 +621,6 @@ class PluginConfigViewSetTestCase(CustomViewSetTestCase): def setUp(self): super().setUp() - PluginConfig.objects.all().delete() def test_plugin_config(self): org = Organization.create("test_org", self.user) @@ -818,7 +843,7 @@ def test_plugin_config_list(self): if obj["attribute"] == pc0.attribute: needle = obj # the owner cannot see configs of other orgs (pc1) - if "organization" in obj.keys(): + if "organization" in obj.keys() and obj["organization"] is not None: self.assertEqual(obj["organization"], "testorg0") self.assertIsNotNone(needle) self.assertIn("type", needle) @@ -845,7 +870,7 @@ def test_plugin_config_list(self): if obj["attribute"] == pc0.attribute: needle = obj # an admin cannot see configs of other orgs (pc1) - if "organization" in obj.keys(): + if "organization" in obj.keys() and obj["organization"] is not None: self.assertEqual(obj["organization"], "testorg0") self.assertIsNotNone(needle) self.assertIn("type", needle) @@ -872,7 +897,7 @@ def test_plugin_config_list(self): if obj["attribute"] == pc1.attribute: needle = obj # a user cannot see configs of other orgs (pc0) - if "organization" in obj.keys(): + if "organization" in 
obj.keys() and obj["organization"] is not None: self.assertEqual(obj["organization"], "testorg1") self.assertIsNotNone(needle) self.assertIn("type", needle) @@ -1242,11 +1267,11 @@ def test_delete(self): user=self.user, organization=org, is_owner=False, is_admin=False ) ac = AnalyzerConfig.objects.get(name="AbuseIPDB") - uri = "/api/plugin-config/1" + uri = "/api/plugin-config" # logged out self.client.logout() - response = self.client.delete(uri, {}, format="json") + response = self.client.delete(f"{uri}/1", {}, format="json") self.assertEqual(response.status_code, 401) param = Parameter.objects.create( @@ -1256,60 +1281,51 @@ def test_delete(self): required=True, type="str", ) - pc = PluginConfig( + pc, _ = PluginConfig.objects.get_or_create( value="supersecret", for_organization=True, owner=self.superuser, parameter=param, analyzer_config=ac, - id=1, ) - pc.full_clean() - pc.save() self.assertEqual(pc.owner, org.owner) # user can not delete org secret self.client.force_authenticate(user=self.user) - response = self.client.delete(uri, {}, format="json") + response = self.client.delete(f"{uri}/{pc.id}", {}, format="json") self.assertEqual(response.status_code, 403) # owner can delete org secret self.client.force_authenticate(user=self.superuser) - response = self.client.delete(uri, format="json") + response = self.client.delete(f"{uri}/{pc.id}", format="json") self.assertEqual(response.status_code, 204) - pc = PluginConfig( + pc, _ = PluginConfig.objects.get_or_create( value="supersecret", for_organization=True, owner=self.superuser, parameter=param, analyzer_config=ac, - id=1, ) - pc.full_clean() - pc.save() self.assertEqual(pc.owner, org.owner) # admin can delete org secret self.client.force_authenticate(user=self.admin) - response = self.client.delete(uri, {}, format="json") + response = self.client.delete(f"{uri}/{pc.id}", {}, format="json") self.assertEqual(response.status_code, 204) - pc = PluginConfig( + pc, _ = PluginConfig.objects.get_or_create( 
value="supersecret", for_organization=False, owner=self.user, parameter=param, analyzer_config=ac, - id=1, ) - pc.full_clean() - pc.save() self.assertEqual(pc.owner, self.user) # user can delete own personal secret self.client.force_authenticate(user=self.user) - response = self.client.delete(uri, {}, format="json") + response = self.client.delete(f"{uri}/{pc.id}", {}, format="json") self.assertEqual(response.status_code, 204) def test_get_403(self): diff --git a/tests/intel_owl/test_tasks.py b/tests/intel_owl/test_tasks.py index e15f1ffd79..d4cac66242 100644 --- a/tests/intel_owl/test_tasks.py +++ b/tests/intel_owl/test_tasks.py @@ -21,6 +21,7 @@ _now = datetime.datetime(2024, 10, 29, 11, tzinfo=datetime.UTC) +@patch("intel_owl.tasks.get_environment", return_value="unittest") @patch("intel_owl.tasks.now", return_value=_now) @patch("intel_owl.tasks.connections.get_connection") class SendElasticTestCase(CustomTestCase): @@ -202,7 +203,7 @@ def test_initial(self, *args, **kwargs): [ { "_op_type": "index", - "_index": "plugin-report-analyzer-report-2024-10-29", + "_index": "plugin-report-unittest-analyzer-report-2024-10-29", "_source": { "user": {"username": "test_elastic_user"}, "membership": { @@ -228,7 +229,7 @@ def test_initial(self, *args, **kwargs): }, { "_op_type": "index", - "_index": "plugin-report-analyzer-report-2024-10-29", + "_index": "plugin-report-unittest-analyzer-report-2024-10-29", "_source": { "user": {"username": "test_elastic_user"}, "membership": { @@ -254,7 +255,7 @@ def test_initial(self, *args, **kwargs): }, { "_op_type": "index", - "_index": "plugin-report-connector-report-2024-10-29", + "_index": "plugin-report-unittest-connector-report-2024-10-29", "_source": { "user": {"username": "test_elastic_user"}, "membership": { @@ -285,7 +286,7 @@ def test_initial(self, *args, **kwargs): }, { "_op_type": "index", - "_index": "plugin-report-pivot-report-2024-10-29", + "_index": "plugin-report-unittest-pivot-report-2024-10-29", "_source": { "user": { 
"username": "test_elastic_user", @@ -340,7 +341,7 @@ def test_update(self, *args, **kwargs): mocked_bulk_param, [ { - "_index": "plugin-report-analyzer-report-2024-10-29", + "_index": "plugin-report-unittest-analyzer-report-2024-10-29", "_op_type": "index", "_source": { "user": {"username": "test_elastic_user"}, @@ -366,7 +367,7 @@ def test_update(self, *args, **kwargs): }, }, { - "_index": "plugin-report-analyzer-report-2024-10-29", + "_index": "plugin-report-unittest-analyzer-report-2024-10-29", "_op_type": "index", "_source": { "user": {"username": "test_elastic_user"}, @@ -392,7 +393,7 @@ def test_update(self, *args, **kwargs): }, }, { - "_index": "plugin-report-connector-report-2024-10-29", + "_index": "plugin-report-unittest-connector-report-2024-10-29", "_op_type": "index", "_source": { "user": {"username": "test_elastic_user"}, @@ -423,7 +424,7 @@ def test_update(self, *args, **kwargs): }, }, { - "_index": "plugin-report-pivot-report-2024-10-29", + "_index": "plugin-report-unittest-pivot-report-2024-10-29", "_op_type": "index", "_source": { "user": {"username": "test_elastic_user"},