diff --git a/.flake8 b/.flake8
index 91efeb3301..6a78e690f9 100644
--- a/.flake8
+++ b/.flake8
@@ -12,4 +12,5 @@ exclude =
migrations,
virtualenv,
ldap_config.py
- api_app/analyzers_manager/migrations/*
\ No newline at end of file
+ api_app/analyzers_manager/migrations/*
+ api_app/ingestors_manager/migrations/*
\ No newline at end of file
diff --git a/.github/CHANGELOG.md b/.github/CHANGELOG.md
index 4199cefc47..8246dc6cee 100644
--- a/.github/CHANGELOG.md
+++ b/.github/CHANGELOG.md
@@ -2,6 +2,16 @@
[**Upgrade Guide**](https://intelowlproject.github.io/docs/IntelOwl/installation/#update-to-the-most-recent-version)
+## [v6.3.0](https://github.com/intelowlproject/IntelOwl/releases/tag/v6.3.0)
+
+This release brings official support for the ARM architecture. From now on, our Docker builds are multi-platform, so you can run IntelOwl smoothly on your favourite ARM machine, e.g. an Apple Silicon Mac or a Raspberry Pi.
+
+We have a few new analyzers for you to play with (in particular, new vulnerability scanners like WAD and Nuclei), and we updated the Abuse.Ch analyzers to allow the configuration of your API key.
+
+As usual, we also have a lot of fixes and dependency upgrades.
+
+Happy hunting!
+
## [v6.2.1](https://github.com/intelowlproject/IntelOwl/releases/tag/v6.2.1)
Minor fixes and dependencies upgrades
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index a039474196..305e802a1e 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -7,12 +7,12 @@ updates:
day: "tuesday"
target-branch: "develop"
ignore:
- # ignore all patch updates since we are using ~=
- # this does not work for security updates
- dependency-name: "*"
update-types: ["version-update:semver-patch"]
- dependency-name: "boto3"
update-types: ["version-update:semver-minor"]
+ - dependency-name: "faker"
+ update-types: ["version-update:semver-minor"]
- package-ecosystem: "pip"
directory: "/integrations/malware_tools_analyzers/requirements"
@@ -21,8 +21,6 @@ updates:
day: "tuesday"
target-branch: "develop"
ignore:
- # ignore all patch updates since we are using ~=
- # this does not work for security updates
- dependency-name: "*"
update-types: ["version-update:semver-patch"]
@@ -33,8 +31,6 @@ updates:
day: "tuesday"
target-branch: "develop"
ignore:
- # ignore all patch updates since we are using ~=
- # this does not work for security updates
- dependency-name: "*"
update-types: [ "version-update:semver-patch" ]
@@ -44,6 +40,16 @@ updates:
interval: "weekly"
day: "tuesday"
target-branch: "develop"
+ ignore:
+ - dependency-name: "*"
+ update-types: [ "version-update:semver-patch" ]
+
+ - package-ecosystem: "pip"
+ directory: "/integrations/nuclei_analyzer"
+ schedule:
+ interval: "weekly"
+ day: "tuesday"
+ target-branch: "develop"
ignore:
# ignore all patch updates since we are using ~=
# this does not work for security updates
@@ -57,8 +63,6 @@ updates:
day: "tuesday"
target-branch: "develop"
ignore:
- # ignore all patch updates since we are using ~=
- # this does not work for security updates
- dependency-name: "*"
update-types: [ "version-update:semver-patch" ]
@@ -78,8 +82,6 @@ updates:
day: "tuesday"
target-branch: "develop"
ignore:
- # ignore all patch updates since we are using ~=
- # this does not work for security updates
- dependency-name: "*"
update-types: ["version-update:semver-patch"]
@@ -90,8 +92,6 @@ updates:
day: "tuesday"
target-branch: "develop"
ignore:
- # ignore all patch updates since we are using ~=
- # this does not work for security updates
- dependency-name: "*"
update-types: ["version-update:semver-patch"]
@@ -102,8 +102,6 @@ updates:
day: "tuesday"
target-branch: "develop"
ignore:
- # ignore all patch updates since we are using ~=
- # this does not work for security updates
- dependency-name: "*"
update-types: ["version-update:semver-patch"]
@@ -113,6 +111,26 @@ updates:
interval: "weekly"
day: "tuesday"
target-branch: "develop"
+ ignore:
+ - dependency-name: "*"
+ update-types: ["version-update:semver-patch"]
+
+ - package-ecosystem: "docker"
+ directory: "/integrations/cyberchef"
+ schedule:
+ interval: "weekly"
+ day: "tuesday"
+ target-branch: "develop"
+ ignore:
+ - dependency-name: "*"
+ update-types: ["version-update:semver-patch"]
+
+ - package-ecosystem: "docker"
+ directory: "/integrations/nuclei_analyzer"
+ schedule:
+ interval: "weekly"
+ day: "tuesday"
+ target-branch: "develop"
ignore:
# ignore all patch updates since we are using ~=
# this does not work for security updates
@@ -126,11 +144,19 @@ updates:
day: "tuesday"
target-branch: "develop"
ignore:
- # ignore all patch updates since we are using ~=
- # this does not work for security updates
- dependency-name: "*"
update-types: ["version-update:semver-patch"]
+ - package-ecosystem: "docker"
+ directory: "/integrations/thug"
+ schedule:
+ interval: "weekly"
+ day: "tuesday"
+ target-branch: "develop"
+ ignore:
+ - dependency-name: "*"
+ update-types: [ "version-update:semver-patch" ]
+
- package-ecosystem: "docker"
directory: "/integrations/phishing_analyzers"
schedule:
@@ -138,8 +164,6 @@ updates:
day: "tuesday"
target-branch: "develop"
ignore:
- # ignore all patch updates since we are using ~=
- # this does not work for security updates
- dependency-name: "*"
update-types: ["version-update:semver-patch"]
@@ -150,7 +174,5 @@ updates:
day: "tuesday"
target-branch: "develop"
ignore:
- # ignore all patch updates since we are using ~=
- # this does not work for security updates
- dependency-name: "*"
update-types: ["version-update:semver-patch"]
diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
index d71fd46ae2..67d1fb2267 100644
--- a/.github/pull_request_template.md
+++ b/.github/pull_request_template.md
@@ -28,6 +28,7 @@ Please delete options that are not relevant.
- [ ] If the plugin interacts with an external service, I have created an attribute called precisely `url` that contains this information. This is required for Health Checks.
- [ ] If the plugin requires mocked testing, `_monkeypatch()` was used in its class to apply the necessary decorators.
- [ ] I have added that raw JSON sample to the `MockUpResponse` of the `_monkeypatch()` method. This serves us to provide a valid sample for testing.
+- [ ] I have inserted the copyright banner at the start of the file: ```# This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl # See the file 'LICENSE' for copying permission.```
- [ ] If external libraries/packages with restrictive licenses were used, they were added in the [Legal Notice](https://github.com/certego/IntelOwl/blob/master/.github/legal_notice.md) section.
- [ ] Linters (`Black`, `Flake`, `Isort`) gave 0 errors. If you have correctly installed [pre-commit](https://intelowlproject.github.io/docs/IntelOwl/contribute/#how-to-start-setup-project-and-development-instance), it does these checks and adjustments on your behalf.
- [ ] I have added tests for the feature/bug I solved (see `tests` folder). All the tests (new and old ones) gave 0 errors.
@@ -38,4 +39,4 @@ Please delete options that are not relevant.
### Important Rules
- If you miss to compile the Checklist properly, your PR won't be reviewed by the maintainers.
-- Everytime you make changes to the PR and you think the work is done, you should explicitly ask for a review. After being reviewed and received a "change request", you should explicitly ask for a review again once you have made the requested changes.
\ No newline at end of file
+- Every time you make changes to the PR and you think the work is done, you should explicitly ask for a review using GitHub's reviewing system detailed [here](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/requesting-a-pull-request-review).
\ No newline at end of file
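For reference, the copyright banner required by the new checklist item appears verbatim at the top of the Python sources touched later in this diff:

```python
# This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl
# See the file 'LICENSE' for copying permission.
```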
diff --git a/.github/release_template.md b/.github/release_template.md
index bc8a70d65e..ab514efb76 100644
--- a/.github/release_template.md
+++ b/.github/release_template.md
@@ -21,4 +21,5 @@ WARNING: The release will be live within an hour!
```commandline
published #IntelOwl vX.X.X! https://github.com/intelowlproject/IntelOwl/releases/tag/vX.X.X #ThreatIntelligence #CyberSecurity #OpenSource #OSINT #DFIR
```
-- [ ] If that was a major release or an important release, communicate the news to the marketing staff
\ No newline at end of file
+- [ ] If that was a major release or an important release, communicate the news to the marketing staff
+- [ ] This is a good time to check for old dangling issues and clean up the inactive ones. Same for issues solved by this release.
\ No newline at end of file
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index 88fb828e28..8a362c3258 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -44,7 +44,7 @@ jobs:
fetch-depth: 2
- name: Set up Python
- uses: actions/setup-python@v5.3.0
+ uses: actions/setup-python@v5.4.0
with:
python-version: '3.11'
diff --git a/.github/workflows/pull_request_automation.yml b/.github/workflows/pull_request_automation.yml
index 3367b61724..0996bcb60a 100644
--- a/.github/workflows/pull_request_automation.yml
+++ b/.github/workflows/pull_request_automation.yml
@@ -37,7 +37,7 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python
- uses: actions/setup-python@v5.3.0
+ uses: actions/setup-python@v5.4.0
with:
python-version: 3.11
diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml
index 6905a99563..eb5b3bc6ee 100644
--- a/.github/workflows/scorecard.yml
+++ b/.github/workflows/scorecard.yml
@@ -59,7 +59,7 @@ jobs:
# Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
# format to the repository Actions tab.
- name: "Upload artifact"
- uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0
+ uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
with:
name: SARIF file
path: results.sarif
diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml
new file mode 100644
index 0000000000..47c654f289
--- /dev/null
+++ b/.github/workflows/stale.yml
@@ -0,0 +1,23 @@
+name: "Tag stale issues and pull requests"
+
+on:
+ schedule:
+ - cron: "0 9 * * *" # Runs every day at 9 AM
+ workflow_dispatch: # Allows the workflow to be triggered manually
+
+jobs:
+ stale:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/stale@v9
+ with:
+ repo-token: ${{ github.token }}
+ stale-pr-message: "This pull request has been marked as stale because it has had no activity for 10 days. If you are still working on this, please provide some updates or it will be closed in 5 days."
+ close-pr-message: "This pull request has been closed because it had no updates in 15 days. If you're still working on this, feel free to reopen."
+ days-before-pr-stale: 10
+ days-before-pr-close: 5
+ stale-pr-label: "stale"
+ exempt-pr-labels: "keep-open"
+ operations-per-run: 100
+ debug-only: false
+ exempt-all-milestones: true
\ No newline at end of file
diff --git a/README.md b/README.md
index ceaf8576b9..cab204f90d 100644
--- a/README.md
+++ b/README.md
@@ -55,10 +55,10 @@ To know more about the project and its growth over time, you may be interested i
You can see the full list of all available analyzers in the [documentation](https://intelowlproject.github.io/docs/IntelOwl/usage/#analyzers).
-| Type | Analyzers Available |
-| -------------------------------------------------- |-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| Inbuilt modules | - Static Office Document, RTF, PDF, PE File Analysis and metadata extraction<br/>- Strings Deobfuscation and analysis ([FLOSS](https://github.com/mandiant/flare-floss), [Stringsifter](https://github.com/mandiant/stringsifter), ...)<br/>- PE Emulation with [Qiling](https://github.com/qilingframework/qiling) and [Speakeasy](https://github.com/mandiant/speakeasy)<br/>- PE Signature verification<br/>- PE Capabilities Extraction ([CAPA](https://github.com/mandiant/capa))<br/>- Javascript Emulation ([Box-js](https://github.com/CapacitorSet/box-js))<br/>- Android Malware Analysis ([Quark-Engine](https://github.com/quark-engine/quark-engine), ...)<br/>- SPF and DMARC Validator<br/>- Yara (a lot of public rules are available. You can also add your own rules)<br/>- more... |
-| External services | - Abuse.ch MalwareBazaar/URLhaus/Threatfox/YARAify<br/>- GreyNoise v2<br/>- Intezer<br/>- VirusTotal v3<br/>- Crowdsec<br/>- URLscan<br/>- Shodan<br/>- AlienVault OTX<br/>- Intelligence_X<br/>- MISP<br/>- many more.. |
+| Type | Analyzers Available |
+| -------------------------------------------------- |---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| Inbuilt modules | - Static Office Document, RTF, PDF, PE, ELF, APK File Analysis and metadata extraction<br/>- Strings Deobfuscation and analysis ([FLOSS](https://github.com/mandiant/flare-floss), [Stringsifter](https://github.com/mandiant/stringsifter), ...)<br/>- [Yara](https://virustotal.github.io/yara/), [ClamAV](https://www.clamav.net/) (a lot of public rules are available. You can also add your own rules)<br/>- PE Emulation with [Qiling](https://github.com/qilingframework/qiling) and [Speakeasy](https://github.com/mandiant/speakeasy)<br/>- PE Signature verification<br/>- PE Capabilities Extraction ([CAPA](https://github.com/mandiant/capa) and [Blint](https://github.com/owasp-dep-scan/blint))<br/>- Javascript Emulation ([Box-js](https://github.com/CapacitorSet/box-js))<br/>- Android Malware Analysis ([Quark-Engine](https://github.com/quark-engine/quark-engine), [Androguard](https://github.com/androguard/androguard), [Mobsf](https://github.com/MobSF/mobsfscan/), ...)<br/>- SPF and DMARC Validator<br/>- PCAP Analysis with [Suricata](https://github.com/OISF/suricata) and [Hfinger](https://github.com/CERT-Polska/hfinger)<br/>- Honeyclients ([Thug](https://github.com/buffer/thug), [Selenium](https://github.com/wkeeling/selenium-wire))<br/>- Scanners ([WAD](https://github.com/CERN-CERT/WAD), [Nuclei](https://github.com/projectdiscovery/nuclei), ...)<br/>- more... |
+| External services | - Abuse.ch MalwareBazaar/URLhaus/Threatfox/YARAify<br/>- GreyNoise v2<br/>- Intezer<br/>- VirusTotal v3<br/>- Crowdsec<br/>- URLscan<br/>- Shodan<br/>- AlienVault OTX<br/>- Intelligence_X<br/>- MISP<br/>- many more.. |
## Partnerships and sponsors
@@ -86,7 +86,7 @@ IntelOwl was born out of Certego's Threat intelligence R&D division and is const
[The Honeynet Project](https://www.honeynet.org) is a non-profit organization working on creating open source cyber security tools and sharing knowledge about cyber threats.
-Thanks to Honeynet, we are hosting a public demo of the application [here](https://intelowl.honeynet.org). If you are interested, please contact a member of Honeynet to get access to the public service.
+Thanks to Honeynet, we are hosting a public demo of the application [here](https://intelowl.honeynet.org). If you are interested, please contact a member of Honeynet or an IntelOwl maintainer to get access to the public service.
#### Google Summer of Code
diff --git a/api_app/analyzers_manager/file_analyzers/detectiteasy.py b/api_app/analyzers_manager/file_analyzers/detectiteasy.py
index 3ac5e35e49..7ee3672e3f 100644
--- a/api_app/analyzers_manager/file_analyzers/detectiteasy.py
+++ b/api_app/analyzers_manager/file_analyzers/detectiteasy.py
@@ -1,60 +1,61 @@
+import json
import logging
-from api_app.analyzers_manager.classes import DockerBasedAnalyzer, FileAnalyzer
+import die
+
+from api_app.analyzers_manager.classes import FileAnalyzer
from tests.mock_utils import MockUpResponse
logger = logging.getLogger(__name__)
-class DetectItEasy(FileAnalyzer, DockerBasedAnalyzer):
- name: str = "executable_analyzer"
- url: str = "http://malware_tools_analyzers:4002/die"
- # http request polling max number of tries
- max_tries: int = 10
- # interval between http request polling (in secs)
- poll_distance: int = 1
+class DetectItEasy(FileAnalyzer):
def update(self):
pass
def run(self):
- fname = str(self.filename).replace("/", "_").replace(" ", "_")
- # get the file to send
- binary = self.read_file_bytes()
- args = [f"@{fname}", "--json"]
- req_data = {
- "args": args,
- }
- req_files = {fname: binary}
- logger.info(
- f"Running {self.analyzer_name} on {self.filename} with args: {args}"
+ logger.info(f"Running DIE on {self.filepath} for {self.md5}")
+
+ json_report = die.scan_file(
+ self.filepath, die.ScanFlags.RESULT_AS_JSON, str(die.database_path / "db")
)
- report = self._docker_run(req_data, req_files, analyzer_name=self.analyzer_name)
- if not report:
- self.report.errors.append("DIE did not detect the file type")
- return {}
- return report
+
+ return json.loads(json_report)
@staticmethod
def mocked_docker_analyzer_get(*args, **kwargs):
return MockUpResponse(
{
- "report": {
- "arch": "NOEXEC",
- "mode": "Unknown",
- "type": "Unknown",
- "detects": [
- {
- "name": "Zip",
- "type": "archive",
- "string": "archive: Zip(2.0)[38.5%,1 file]",
- "options": "38.5%,1 file",
- "version": "2.0",
- }
- ],
- "filetype": "Binary",
- "endianess": "LE",
- }
+ "detects": [
+ {
+ "filetype": "PE64",
+ "parentfilepart": "Header",
+ "values": [
+ {
+ "info": "Console64,console",
+ "name": "GNU linker ld (GNU Binutils)",
+ "string": "Linker: GNU linker ld (GNU Binutils)(2.28)[Console64,console]",
+ "type": "Linker",
+ "version": "2.28",
+ },
+ {
+ "info": "",
+ "name": "MinGW",
+ "string": "Compiler: MinGW",
+ "type": "Compiler",
+ "version": "",
+ },
+ {
+ "info": "NRV,brute",
+ "name": "UPX",
+ "string": "Packer: UPX(4.24)[NRV,brute]",
+ "type": "Packer",
+ "version": "4.24",
+ },
+ ],
+ }
+ ]
},
200,
)
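A minimal standalone sketch of the new library-based flow above, assuming the `die` Python bindings used by the analyzer are installed; the call signature is copied from the analyzer itself, and the sample path is hypothetical:

```python
import json

import die  # Python bindings for Detect It Easy, as used by the analyzer


def die_scan(filepath: str) -> dict:
    # RESULT_AS_JSON makes DIE return its detection report as a JSON string
    json_report = die.scan_file(
        filepath, die.ScanFlags.RESULT_AS_JSON, str(die.database_path / "db")
    )
    return json.loads(json_report)


if __name__ == "__main__":
    print(die_scan("/tmp/sample.exe"))  # hypothetical sample path
```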
diff --git a/api_app/analyzers_manager/file_analyzers/phishing/phishing_form_compiler.py b/api_app/analyzers_manager/file_analyzers/phishing/phishing_form_compiler.py
index ad2fdf2095..a552fc533e 100644
--- a/api_app/analyzers_manager/file_analyzers/phishing/phishing_form_compiler.py
+++ b/api_app/analyzers_manager/file_analyzers/phishing/phishing_form_compiler.py
@@ -1,13 +1,13 @@
import logging
from datetime import date, timedelta
from typing import Dict
-from urllib.parse import urlparse
import requests
from faker import Faker # skipcq: BAN-B410
from lxml.etree import HTMLParser # skipcq: BAN-B410
from lxml.html import document_fromstring
from requests import HTTPError, Response
+from requests.exceptions import MissingSchema
from api_app.analyzers_manager.classes import FileAnalyzer
from api_app.models import PythonConfig
@@ -138,25 +138,33 @@ def identify_text_input(self, input_name: str) -> str:
return fake_value
def extract_action_attribute(self, form) -> str:
- if not (form_action := form.get("action", None)):
+ form_action: str = form.get("action", None)
+ if not form_action:
logger.info(
f"'action' attribute not found in form. Defaulting to {self.target_site=}"
)
form_action = self.target_site
-
- # if relative url extracted, clean it from '/' and concatenate everything
- # if action was not extracted in previous step the if should not pass as it is a url
- if not urlparse(form_action).netloc:
+ elif form_action.startswith("/"): # pure relative url
logger.info(f"Found relative url in {form_action=}")
+ form_action = form_action.replace("/", "", 1)
base_site = self.target_site
+
if base_site.endswith("/"):
base_site = base_site[:-1]
- if form_action.startswith("/"):
- form_action = form_action.replace("/", "", 1)
+ form_action = base_site + "/" + form_action
+ elif (
+ "." in form_action and "://" not in form_action
+ ): # found a domain (relative file names such as "login.php" should start with /)
+ logger.info(f"Found a domain in form action {form_action=}")
+ else:
+ base_site = self.target_site
+ if base_site.endswith("/"):
+ base_site = base_site[:-1]
form_action = base_site + "/" + form_action
logger.info(f"Extracted action to post data to: {form_action}")
+
return form_action
def compile_form_field(self, form) -> dict:
@@ -200,16 +208,29 @@ def perform_request_to_form(self, form) -> Response:
headers = {
"User-Agent": self.user_agent,
}
- response = requests.post(
- url=dest_url,
- data=params,
- headers=headers,
- proxies=(
- {"http": self.proxy_address, "https": self.proxy_address}
- if self.proxy_address
- else None
- ),
- )
+ try:
+ response = requests.post(
+ url=dest_url,
+ data=params,
+ headers=headers,
+ proxies=(
+ {"http": self.proxy_address, "https": self.proxy_address}
+ if self.proxy_address
+ else None
+ ),
+ )
+ except MissingSchema:
+ logger.info(f"Adding default 'https://' schema to {dest_url}")
+ response = requests.post(
+ url="https://" + dest_url,
+ data=params,
+ headers=headers,
+ proxies=(
+ {"http": self.proxy_address, "https": self.proxy_address}
+ if self.proxy_address
+ else None
+ ),
+ )
logger.info(f"Request headers: {response.request.headers}")
return response
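The new `extract_action_attribute` branching can be summarised as a pure function. This is an illustrative sketch of the logic above, not IntelOwl code (`target_site` stands in for `self.target_site`):

```python
def resolve_action(form_action: str, target_site: str) -> str:
    """Mirror of the branching above: default, relative URL, bare domain."""
    base = target_site[:-1] if target_site.endswith("/") else target_site
    if not form_action:
        # no 'action' attribute: post back to the phishing page itself
        return target_site
    if form_action.startswith("/"):
        # pure relative URL: strip the first slash and join with the base site
        return base + "/" + form_action.replace("/", "", 1)
    if "." in form_action and "://" not in form_action:
        # looks like a bare domain; relative file names should start with "/"
        return form_action
    # anything else is treated as a relative path and joined with the base site
    return base + "/" + form_action


assert resolve_action("/login.php", "https://site.test/") == "https://site.test/login.php"
assert resolve_action("", "https://site.test") == "https://site.test"
assert resolve_action("collect", "https://site.test") == "https://site.test/collect"
```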
diff --git a/api_app/analyzers_manager/file_analyzers/thug_file.py b/api_app/analyzers_manager/file_analyzers/thug_file.py
index 3d0cdd6954..9bdbe3acb3 100644
--- a/api_app/analyzers_manager/file_analyzers/thug_file.py
+++ b/api_app/analyzers_manager/file_analyzers/thug_file.py
@@ -8,7 +8,7 @@
class ThugFile(FileAnalyzer, DockerBasedAnalyzer):
name: str = "Thug"
- url: str = "http://malware_tools_analyzers:4002/thug"
+ url: str = "http://thug:4002/thug"
# http request polling max number of tries
max_tries: int = 15
# interval between http request polling (in secs)
diff --git a/api_app/analyzers_manager/file_analyzers/yaraify_file_scan.py b/api_app/analyzers_manager/file_analyzers/yaraify_file_scan.py
index a1906c49bd..e43cbd6d9d 100644
--- a/api_app/analyzers_manager/file_analyzers/yaraify_file_scan.py
+++ b/api_app/analyzers_manager/file_analyzers/yaraify_file_scan.py
@@ -4,7 +4,6 @@
import json
import logging
import time
-from typing import Dict
import requests
@@ -27,7 +26,10 @@ class YARAifyFileScan(FileAnalyzer, YARAify):
skip_noisy: bool
skip_known: bool
- def config(self, runtime_configuration: Dict):
+ def update(self) -> bool:
+ pass
+
+ def config(self, runtime_configuration: dict):
FileAnalyzer.config(self, runtime_configuration)
self.query = "lookup_hash"
YARAify.config(self, runtime_configuration)
@@ -73,7 +75,9 @@ def run(self):
"file": (name_to_send, file),
}
logger.info(f"yara file scan md5 {self.md5} sending sample for analysis")
- response = requests.post(self.url, files=files_)
+ response = requests.post(
+ self.url, files=files_, headers=self.authentication_header
+ )
response.raise_for_status()
scan_response = response.json()
scan_query_status = scan_response.get("query_status")
@@ -92,7 +96,9 @@ def run(self):
f"task_id: {task_id}"
)
data = {"query": "get_results", "task_id": task_id}
- response = requests.post(self.url, json=data)
+ response = requests.post(
+ self.url, json=data, headers=self.authentication_header
+ )
response.raise_for_status()
task_response = response.json()
logger.debug(task_response)
diff --git a/api_app/analyzers_manager/migrations/0144_analyzer_config_ultradns_dns.py b/api_app/analyzers_manager/migrations/0144_analyzer_config_ultradns_dns.py
new file mode 100644
index 0000000000..d4ad149cb7
--- /dev/null
+++ b/api_app/analyzers_manager/migrations/0144_analyzer_config_ultradns_dns.py
@@ -0,0 +1,163 @@
+from django.db import migrations
+from django.db.models.fields.related_descriptors import (
+ ForwardManyToOneDescriptor,
+ ForwardOneToOneDescriptor,
+ ManyToManyDescriptor,
+ ReverseManyToOneDescriptor,
+ ReverseOneToOneDescriptor,
+)
+
+plugin = {
+ "python_module": {
+ "health_check_schedule": None,
+ "update_schedule": None,
+ "module": "dns.dns_resolvers.ultradns_dns_resolver.UltraDNSDNSResolver",
+ "base_path": "api_app.analyzers_manager.observable_analyzers",
+ },
+ "name": "UltraDNS_DNS",
+ "description": "Retrieve current domain resolution with UltraDNS",
+ "disabled": False,
+ "soft_time_limit": 30,
+ "routing_key": "default",
+ "health_check_status": True,
+ "type": "observable",
+ "docker_based": False,
+ "maximum_tlp": "AMBER",
+ "observable_supported": ["url", "domain"],
+ "supported_filetypes": [],
+ "run_hash": False,
+ "run_hash_type": "",
+ "not_supported_filetypes": [],
+ "mapping_data_model": {},
+ "model": "analyzers_manager.AnalyzerConfig",
+}
+
+params = [
+ {
+ "python_module": {
+ "module": "dns.dns_resolvers.ultradns_dns_resolver.UltraDNSDNSResolver",
+ "base_path": "api_app.analyzers_manager.observable_analyzers",
+ },
+ "name": "query_type",
+ "type": "str",
+ "description": "",
+ "is_secret": False,
+ "required": False,
+ }
+]
+
+values = [
+ {
+ "parameter": {
+ "python_module": {
+ "module": "dns.dns_resolvers.ultradns_dns_resolver.UltraDNSDNSResolver",
+ "base_path": "api_app.analyzers_manager.observable_analyzers",
+ },
+ "name": "query_type",
+ "type": "str",
+ "description": "",
+ "is_secret": False,
+ "required": False,
+ },
+ "analyzer_config": "UltraDNS_DNS",
+ "connector_config": None,
+ "visualizer_config": None,
+ "ingestor_config": None,
+ "pivot_config": None,
+ "for_organization": False,
+ "value": "A",
+ "updated_at": "2024-12-25T11:31:43.211468Z",
+ "owner": None,
+ }
+]
+
+
+def _get_real_obj(Model, field, value):
+ def _get_obj(Model, other_model, value):
+ if isinstance(value, dict):
+ real_vals = {}
+ for key, real_val in value.items():
+ real_vals[key] = _get_real_obj(other_model, key, real_val)
+ value = other_model.objects.get_or_create(**real_vals)[0]
+ # it is just the primary key serialized
+ else:
+ if isinstance(value, int):
+ if Model.__name__ == "PluginConfig":
+ value = other_model.objects.get(name=plugin["name"])
+ else:
+ value = other_model.objects.get(pk=value)
+ else:
+ value = other_model.objects.get(name=value)
+ return value
+
+ if (
+ type(getattr(Model, field))
+ in [
+ ForwardManyToOneDescriptor,
+ ReverseManyToOneDescriptor,
+ ReverseOneToOneDescriptor,
+ ForwardOneToOneDescriptor,
+ ]
+ and value
+ ):
+ other_model = getattr(Model, field).get_queryset().model
+ value = _get_obj(Model, other_model, value)
+ elif type(getattr(Model, field)) in [ManyToManyDescriptor] and value:
+ other_model = getattr(Model, field).rel.model
+ value = [_get_obj(Model, other_model, val) for val in value]
+ return value
+
+
+def _create_object(Model, data):
+ mtm, no_mtm = {}, {}
+ for field, value in data.items():
+ value = _get_real_obj(Model, field, value)
+ if type(getattr(Model, field)) is ManyToManyDescriptor:
+ mtm[field] = value
+ else:
+ no_mtm[field] = value
+ try:
+ o = Model.objects.get(**no_mtm)
+ except Model.DoesNotExist:
+ o = Model(**no_mtm)
+ o.full_clean()
+ o.save()
+ for field, value in mtm.items():
+ attribute = getattr(o, field)
+ if value is not None:
+ attribute.set(value)
+ return False
+ return True
+
+
+def migrate(apps, schema_editor):
+ Parameter = apps.get_model("api_app", "Parameter")
+ PluginConfig = apps.get_model("api_app", "PluginConfig")
+ python_path = plugin.pop("model")
+ Model = apps.get_model(*python_path.split("."))
+ if not Model.objects.filter(name=plugin["name"]).exists():
+ exists = _create_object(Model, plugin)
+ if not exists:
+ for param in params:
+ _create_object(Parameter, param)
+ for value in values:
+ _create_object(PluginConfig, value)
+
+
+def reverse_migrate(apps, schema_editor):
+ python_path = plugin.pop("model")
+ Model = apps.get_model(*python_path.split("."))
+ Model.objects.get(name=plugin["name"]).delete()
+
+
+class Migration(migrations.Migration):
+ atomic = False
+ dependencies = [
+ ("api_app", "0065_job_mpnodesearch"),
+ (
+ "analyzers_manager",
+ "0143_alter_analyzer_config_phishing_extractor_and_form_compiler",
+ ),
+ ]
+
+ operations = [migrations.RunPython(migrate, reverse_migrate)]
diff --git a/api_app/analyzers_manager/migrations/0145_analyzer_config_ultradns_malicious_detector.py b/api_app/analyzers_manager/migrations/0145_analyzer_config_ultradns_malicious_detector.py
new file mode 100644
index 0000000000..027119f84d
--- /dev/null
+++ b/api_app/analyzers_manager/migrations/0145_analyzer_config_ultradns_malicious_detector.py
@@ -0,0 +1,128 @@
+from django.db import migrations
+from django.db.models.fields.related_descriptors import (
+ ForwardManyToOneDescriptor,
+ ForwardOneToOneDescriptor,
+ ManyToManyDescriptor,
+ ReverseManyToOneDescriptor,
+ ReverseOneToOneDescriptor,
+)
+
+plugin = {
+ "python_module": {
+ "health_check_schedule": None,
+ "update_schedule": None,
+ "module": "dns.dns_malicious_detectors.ultradns_malicious_detector.UltraDNSMaliciousDetector",
+ "base_path": "api_app.analyzers_manager.observable_analyzers",
+ },
+ "name": "UltraDNS_Malicious_Detector",
+ "description": "Scan if a DNS is marked malicious by UltraDNS",
+ "disabled": False,
+ "soft_time_limit": 30,
+ "routing_key": "default",
+ "health_check_status": True,
+ "type": "observable",
+ "docker_based": False,
+ "maximum_tlp": "AMBER",
+ "observable_supported": ["url", "domain"],
+ "supported_filetypes": [],
+ "run_hash": False,
+ "run_hash_type": "",
+ "not_supported_filetypes": [],
+ "mapping_data_model": {},
+ "model": "analyzers_manager.AnalyzerConfig",
+}
+
+params = []
+
+values = []
+
+
+def _get_real_obj(Model, field, value):
+ def _get_obj(Model, other_model, value):
+ if isinstance(value, dict):
+ real_vals = {}
+ for key, real_val in value.items():
+ real_vals[key] = _get_real_obj(other_model, key, real_val)
+ value = other_model.objects.get_or_create(**real_vals)[0]
+ # it is just the primary key serialized
+ else:
+ if isinstance(value, int):
+ if Model.__name__ == "PluginConfig":
+ value = other_model.objects.get(name=plugin["name"])
+ else:
+ value = other_model.objects.get(pk=value)
+ else:
+ value = other_model.objects.get(name=value)
+ return value
+
+ if (
+ type(getattr(Model, field))
+ in [
+ ForwardManyToOneDescriptor,
+ ReverseManyToOneDescriptor,
+ ReverseOneToOneDescriptor,
+ ForwardOneToOneDescriptor,
+ ]
+ and value
+ ):
+ other_model = getattr(Model, field).get_queryset().model
+ value = _get_obj(Model, other_model, value)
+ elif type(getattr(Model, field)) in [ManyToManyDescriptor] and value:
+ other_model = getattr(Model, field).rel.model
+ value = [_get_obj(Model, other_model, val) for val in value]
+ return value
+
+
+def _create_object(Model, data):
+ mtm, no_mtm = {}, {}
+ for field, value in data.items():
+ value = _get_real_obj(Model, field, value)
+ if type(getattr(Model, field)) is ManyToManyDescriptor:
+ mtm[field] = value
+ else:
+ no_mtm[field] = value
+ try:
+ o = Model.objects.get(**no_mtm)
+ except Model.DoesNotExist:
+ o = Model(**no_mtm)
+ o.full_clean()
+ o.save()
+ for field, value in mtm.items():
+ attribute = getattr(o, field)
+ if value is not None:
+ attribute.set(value)
+ return False
+ return True
+
+
+def migrate(apps, schema_editor):
+ Parameter = apps.get_model("api_app", "Parameter")
+ PluginConfig = apps.get_model("api_app", "PluginConfig")
+ python_path = plugin.pop("model")
+ Model = apps.get_model(*python_path.split("."))
+ if not Model.objects.filter(name=plugin["name"]).exists():
+ exists = _create_object(Model, plugin)
+ if not exists:
+ for param in params:
+ _create_object(Parameter, param)
+ for value in values:
+ _create_object(PluginConfig, value)
+
+
+def reverse_migrate(apps, schema_editor):
+ python_path = plugin.pop("model")
+ Model = apps.get_model(*python_path.split("."))
+ Model.objects.get(name=plugin["name"]).delete()
+
+
+class Migration(migrations.Migration):
+ atomic = False
+ dependencies = [
+ ("api_app", "0065_job_mpnodesearch"),
+ (
+ "analyzers_manager",
+ "0144_analyzer_config_ultradns_dns",
+ ),
+ ]
+
+ operations = [migrations.RunPython(migrate, reverse_migrate)]
diff --git a/api_app/analyzers_manager/migrations/0146_analyzer_config_wad.py b/api_app/analyzers_manager/migrations/0146_analyzer_config_wad.py
new file mode 100644
index 0000000000..b9d20abf88
--- /dev/null
+++ b/api_app/analyzers_manager/migrations/0146_analyzer_config_wad.py
@@ -0,0 +1,128 @@
+from django.db import migrations
+from django.db.models.fields.related_descriptors import (
+ ForwardManyToOneDescriptor,
+ ForwardOneToOneDescriptor,
+ ManyToManyDescriptor,
+ ReverseManyToOneDescriptor,
+ ReverseOneToOneDescriptor,
+)
+
+plugin = {
+ "python_module": {
+ "health_check_schedule": None,
+ "update_schedule": None,
+ "module": "wad.WAD",
+ "base_path": "api_app.analyzers_manager.observable_analyzers",
+ },
+ "name": "WAD",
+ "description": "[WAD](https://github.com/CERN-CERT/WAD) (Web Application Detector) lets you analyze given URL(s) and detect technologies used by web application behind that URL, from the OS and web server level, to the programming platform and frameworks, as well as server- and client-side applications, tools and libraries.",
+ "disabled": False,
+ "soft_time_limit": 60,
+ "routing_key": "default",
+ "health_check_status": True,
+ "type": "observable",
+ "docker_based": False,
+ "maximum_tlp": "CLEAR",
+ "observable_supported": ["url"],
+ "supported_filetypes": [],
+ "run_hash": False,
+ "run_hash_type": "",
+ "not_supported_filetypes": [],
+ "mapping_data_model": {},
+ "model": "analyzers_manager.AnalyzerConfig",
+}
+
+params = []
+
+values = []
+
+
+def _get_real_obj(Model, field, value):
+ def _get_obj(Model, other_model, value):
+ if isinstance(value, dict):
+ real_vals = {}
+ for key, real_val in value.items():
+ real_vals[key] = _get_real_obj(other_model, key, real_val)
+ value = other_model.objects.get_or_create(**real_vals)[0]
+ # it is just the primary key serialized
+ else:
+ if isinstance(value, int):
+ if Model.__name__ == "PluginConfig":
+ value = other_model.objects.get(name=plugin["name"])
+ else:
+ value = other_model.objects.get(pk=value)
+ else:
+ value = other_model.objects.get(name=value)
+ return value
+
+ if (
+ type(getattr(Model, field))
+ in [
+ ForwardManyToOneDescriptor,
+ ReverseManyToOneDescriptor,
+ ReverseOneToOneDescriptor,
+ ForwardOneToOneDescriptor,
+ ]
+ and value
+ ):
+ other_model = getattr(Model, field).get_queryset().model
+ value = _get_obj(Model, other_model, value)
+ elif type(getattr(Model, field)) in [ManyToManyDescriptor] and value:
+ other_model = getattr(Model, field).rel.model
+ value = [_get_obj(Model, other_model, val) for val in value]
+ return value
+
+
+def _create_object(Model, data):
+ mtm, no_mtm = {}, {}
+ for field, value in data.items():
+ value = _get_real_obj(Model, field, value)
+ if type(getattr(Model, field)) is ManyToManyDescriptor:
+ mtm[field] = value
+ else:
+ no_mtm[field] = value
+ try:
+ o = Model.objects.get(**no_mtm)
+ except Model.DoesNotExist:
+ o = Model(**no_mtm)
+ o.full_clean()
+ o.save()
+ for field, value in mtm.items():
+ attribute = getattr(o, field)
+ if value is not None:
+ attribute.set(value)
+ return False
+ return True
+
+
+def migrate(apps, schema_editor):
+ Parameter = apps.get_model("api_app", "Parameter")
+ PluginConfig = apps.get_model("api_app", "PluginConfig")
+ python_path = plugin.pop("model")
+ Model = apps.get_model(*python_path.split("."))
+ if not Model.objects.filter(name=plugin["name"]).exists():
+ exists = _create_object(Model, plugin)
+ if not exists:
+ for param in params:
+ _create_object(Parameter, param)
+ for value in values:
+ _create_object(PluginConfig, value)
+
+
+def reverse_migrate(apps, schema_editor):
+ python_path = plugin.pop("model")
+ Model = apps.get_model(*python_path.split("."))
+ Model.objects.get(name=plugin["name"]).delete()
+
+
+class Migration(migrations.Migration):
+ atomic = False
+ dependencies = [
+ ("api_app", "0065_job_mpnodesearch"),
+ (
+ "analyzers_manager",
+ "0145_analyzer_config_ultradns_malicious_detector",
+ ),
+ ]
+
+ operations = [migrations.RunPython(migrate, reverse_migrate)]
diff --git a/api_app/analyzers_manager/migrations/0147_alter_analyzer_config_feodo_yaraify_urlhaus_yaraify_scan.py b/api_app/analyzers_manager/migrations/0147_alter_analyzer_config_feodo_yaraify_urlhaus_yaraify_scan.py
new file mode 100644
index 0000000000..df862da114
--- /dev/null
+++ b/api_app/analyzers_manager/migrations/0147_alter_analyzer_config_feodo_yaraify_urlhaus_yaraify_scan.py
@@ -0,0 +1,98 @@
+from django.db import migrations
+
+
+def migrate(apps, schema_editor):
+ Parameter = apps.get_model("api_app", "Parameter")
+ PythonModule = apps.get_model("api_app", "PythonModule")
+
+ # observables
+ observable_analyzers = [
+ "urlhaus.URLHaus",
+ "yaraify.YARAify",
+ "feodo_tracker.Feodo_Tracker",
+ "threatfox.ThreatFox",
+ "mb_get.MB_GET",
+ "mb_google.MB_GOOGLE",
+ ]
+ for observable_analyzer in observable_analyzers:
+ module = PythonModule.objects.get(
+ module=observable_analyzer,
+ base_path="api_app.analyzers_manager.observable_analyzers",
+ )
+ Parameter.objects.create(
+ name="service_api_key",
+ type="str",
+ description="Optional API key to connect to abuse.ch services.",
+ is_secret=True,
+ required=False,
+ python_module=module,
+ )
+
+ # files
+ yaraify_scan_module = PythonModule.objects.get(
+ module="yaraify_file_scan.YARAifyFileScan",
+ base_path="api_app.analyzers_manager.file_analyzers",
+ )
+ Parameter.objects.create(
+ name="service_api_key",
+ type="str",
+ description="Optional API key to connect to abuse.ch services.",
+ is_secret=True,
+ required=False,
+ python_module=yaraify_scan_module,
+ )
+
+
+def reverse_migrate(apps, schema_editor):
+ Parameter = apps.get_model("api_app", "Parameter")
+ PythonModule = apps.get_model("api_app", "PythonModule")
+
+ # observables
+ observable_analyzers = [
+ "urlhaus.URLHaus",
+ "yaraify.YARAify",
+ "feodo_tracker.Feodo_Tracker",
+ "threatfox.ThreatFox",
+ "mb_get.MB_GET",
+ "mb_google.MB_GOOGLE",
+ ]
+ for observable_analyzer in observable_analyzers:
+ module = PythonModule.objects.get(
+ module=observable_analyzer,
+ base_path="api_app.analyzers_manager.observable_analyzers",
+ )
+ Parameter.objects.get(
+ name="service_api_key",
+ type="str",
+ description="Optional API key to connect to abuse.ch services.",
+ is_secret=True,
+ required=False,
+ python_module=module,
+ ).delete()
+
+ # files
+ yaraify_scan_module = PythonModule.objects.get(
+ module="yaraify_file_scan.YARAifyFileScan",
+ base_path="api_app.analyzers_manager.file_analyzers",
+ )
+ Parameter.objects.get(
+ name="service_api_key",
+ type="str",
+ description="Optional API key to connect to abuse.ch services.",
+ is_secret=True,
+ required=False,
+ python_module=yaraify_scan_module,
+ ).delete()
+
+
+class Migration(migrations.Migration):
+ atomic = False
+ dependencies = [
+ ("api_app", "0065_job_mpnodesearch"),
+ (
+ "analyzers_manager",
+ "0146_analyzer_config_wad",
+ ),
+ ]
+
+ operations = [migrations.RunPython(migrate, reverse_migrate)]
diff --git a/api_app/analyzers_manager/migrations/0148_analyzer_config_nuclei.py b/api_app/analyzers_manager/migrations/0148_analyzer_config_nuclei.py
new file mode 100644
index 0000000000..113076c536
--- /dev/null
+++ b/api_app/analyzers_manager/migrations/0148_analyzer_config_nuclei.py
@@ -0,0 +1,163 @@
+from django.db import migrations
+from django.db.models.fields.related_descriptors import (
+ ForwardManyToOneDescriptor,
+ ForwardOneToOneDescriptor,
+ ManyToManyDescriptor,
+ ReverseManyToOneDescriptor,
+ ReverseOneToOneDescriptor,
+)
+
+plugin = {
+ "python_module": {
+ "health_check_schedule": None,
+ "update_schedule": None,
+ "module": "nuclei.NucleiAnalyzer",
+ "base_path": "api_app.analyzers_manager.observable_analyzers",
+ },
+ "name": "Nuclei",
+ "description": "[Nuclei](https://github.com/projectdiscovery/nuclei) is a fast, customizable vulnerability scanner that leverages YAML-based templates to detect, rank, and address security flaws. It operates using structured templates that define specific security checks.",
+ "disabled": False,
+ "soft_time_limit": 1200,
+ "routing_key": "default",
+ "health_check_status": True,
+ "type": "observable",
+ "docker_based": True,
+ "maximum_tlp": "RED",
+ "observable_supported": ["ip", "url"],
+ "supported_filetypes": [],
+ "run_hash": False,
+ "run_hash_type": "",
+ "not_supported_filetypes": [],
+ "mapping_data_model": {},
+ "model": "analyzers_manager.AnalyzerConfig",
+}
+
+params = [
+ {
+ "python_module": {
+ "module": "nuclei.NucleiAnalyzer",
+ "base_path": "api_app.analyzers_manager.observable_analyzers",
+ },
+ "name": "template_dirs",
+ "type": "list",
+ "description": "The template_dirs parameter allows you to specify a list of directories containing templates, each focusing on a particular category of vulnerabilities, exposures, or security assessments.\r\nAvailable Template Categories:\r\ncloud\r\ncode\r\ncves\r\nvulnerabilities\r\ndns\r\nfile\r\nheadless\r\nhelpers\r\nhttp\r\njavascript\r\nnetwork\r\npassive\r\nprofiles\r\nssl\r\nworkflows\r\nexposures",
+ "is_secret": False,
+ "required": False,
+ }
+]
+
+values = [
+ {
+ "parameter": {
+ "python_module": {
+ "module": "nuclei.NucleiAnalyzer",
+ "base_path": "api_app.analyzers_manager.observable_analyzers",
+ },
+ "name": "template_dirs",
+ "type": "list",
+ "description": "The template_dirs parameter allows you to specify a list of directories containing templates, each focusing on a particular category of vulnerabilities, exposures, or security assessments.\r\nAvailable Template Categories:\r\ncloud\r\ncode\r\ncves\r\nvulnerabilities\r\ndns\r\nfile\r\nheadless\r\nhelpers\r\nhttp\r\njavascript\r\nnetwork\r\npassive\r\nprofiles\r\nssl\r\nworkflows\r\nexposures",
+ "is_secret": False,
+ "required": False,
+ },
+ "analyzer_config": "Nuclei",
+ "connector_config": None,
+ "visualizer_config": None,
+ "ingestor_config": None,
+ "pivot_config": None,
+ "for_organization": False,
+ "value": [],
+ "updated_at": "2025-01-08T08:33:45.653741Z",
+ "owner": None,
+ }
+]
+
+
+def _get_real_obj(Model, field, value):
+ def _get_obj(Model, other_model, value):
+ if isinstance(value, dict):
+ real_vals = {}
+ for key, real_val in value.items():
+ real_vals[key] = _get_real_obj(other_model, key, real_val)
+ value = other_model.objects.get_or_create(**real_vals)[0]
+ # it is just the primary key serialized
+ else:
+ if isinstance(value, int):
+ if Model.__name__ == "PluginConfig":
+ value = other_model.objects.get(name=plugin["name"])
+ else:
+ value = other_model.objects.get(pk=value)
+ else:
+ value = other_model.objects.get(name=value)
+ return value
+
+ if (
+ type(getattr(Model, field))
+ in [
+ ForwardManyToOneDescriptor,
+ ReverseManyToOneDescriptor,
+ ReverseOneToOneDescriptor,
+ ForwardOneToOneDescriptor,
+ ]
+ and value
+ ):
+ other_model = getattr(Model, field).get_queryset().model
+ value = _get_obj(Model, other_model, value)
+ elif type(getattr(Model, field)) in [ManyToManyDescriptor] and value:
+ other_model = getattr(Model, field).rel.model
+ value = [_get_obj(Model, other_model, val) for val in value]
+ return value
+
+
+def _create_object(Model, data):
+ mtm, no_mtm = {}, {}
+ for field, value in data.items():
+ value = _get_real_obj(Model, field, value)
+ if type(getattr(Model, field)) is ManyToManyDescriptor:
+ mtm[field] = value
+ else:
+ no_mtm[field] = value
+ try:
+ o = Model.objects.get(**no_mtm)
+ except Model.DoesNotExist:
+ o = Model(**no_mtm)
+ o.full_clean()
+ o.save()
+ for field, value in mtm.items():
+ attribute = getattr(o, field)
+ if value is not None:
+ attribute.set(value)
+ return False
+ return True
+
+
+def migrate(apps, schema_editor):
+ Parameter = apps.get_model("api_app", "Parameter")
+ PluginConfig = apps.get_model("api_app", "PluginConfig")
+ python_path = plugin.pop("model")
+ Model = apps.get_model(*python_path.split("."))
+ if not Model.objects.filter(name=plugin["name"]).exists():
+ exists = _create_object(Model, plugin)
+ if not exists:
+ for param in params:
+ _create_object(Parameter, param)
+ for value in values:
+ _create_object(PluginConfig, value)
+
+
+def reverse_migrate(apps, schema_editor):
+ python_path = plugin.pop("model")
+ Model = apps.get_model(*python_path.split("."))
+ Model.objects.get(name=plugin["name"]).delete()
+
+
+class Migration(migrations.Migration):
+ atomic = False
+ dependencies = [
+ ("api_app", "0065_job_mpnodesearch"),
+ (
+ "analyzers_manager",
+ "0147_alter_analyzer_config_feodo_yaraify_urlhaus_yaraify_scan",
+ ),
+ ]
+
+ operations = [migrations.RunPython(migrate, reverse_migrate)]
diff --git a/api_app/analyzers_manager/migrations/0149_alter_die_analyzer.py b/api_app/analyzers_manager/migrations/0149_alter_die_analyzer.py
new file mode 100644
index 0000000000..d16cf22707
--- /dev/null
+++ b/api_app/analyzers_manager/migrations/0149_alter_die_analyzer.py
@@ -0,0 +1,35 @@
+from django.db import migrations
+
+
+def migrate(apps, schema_editor):
+ PythonModule = apps.get_model("api_app", "PythonModule")
+
+ pm = PythonModule.objects.get(
+ module="detectiteasy.DetectItEasy",
+ base_path="api_app.analyzers_manager.file_analyzers",
+ )
+ pm.parameters.all().delete()
+
+ AnalyzerConfig = apps.get_model("analyzers_manager", "AnalyzerConfig")
+
+ ac = AnalyzerConfig.objects.get(
+ name="DetectItEasy",
+ )
+ ac.docker_based = False
+ ac.save()
+
+
+def reverse_migrate(apps, schema_editor): ...
+
+
+class Migration(migrations.Migration):
+ dependencies = [
+ ("api_app", "0065_job_mpnodesearch"),
+ (
+ "analyzers_manager",
+ "0148_analyzer_config_nuclei",
+ ),
+ ]
+ operations = [
+ migrations.RunPython(migrate, reverse_migrate),
+ ]
diff --git a/api_app/analyzers_manager/observable_analyzers/ailtyposquatting.py b/api_app/analyzers_manager/observable_analyzers/ailtyposquatting.py
index 0b79815813..c8981ba401 100644
--- a/api_app/analyzers_manager/observable_analyzers/ailtyposquatting.py
+++ b/api_app/analyzers_manager/observable_analyzers/ailtyposquatting.py
@@ -1,3 +1,5 @@
+# This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl
+# See the file 'LICENSE' for copying permission.
import logging
import math
diff --git a/api_app/analyzers_manager/observable_analyzers/apivoid.py b/api_app/analyzers_manager/observable_analyzers/apivoid.py
index 1bc8e2dcc6..f7548844b1 100644
--- a/api_app/analyzers_manager/observable_analyzers/apivoid.py
+++ b/api_app/analyzers_manager/observable_analyzers/apivoid.py
@@ -1,9 +1,12 @@
# flake8: noqa
-# done for the mocked respose,
+# done for the mocked response,
# everything else is linted and tested
+# This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl
+# See the file 'LICENSE' for copying permission.
import requests
from api_app.analyzers_manager import classes
+from api_app.analyzers_manager.exceptions import AnalyzerConfigurationException
from tests.mock_utils import MockUpResponse, if_mock_connections, patch
@@ -16,27 +19,18 @@ def update(self):
def run(self):
if self.observable_classification == self.ObservableTypes.DOMAIN.value:
- url = (
- self.url
- + f"""/domainbl/v1/pay-as-you-go/
- ?key={self._api_key}
- &host={self.observable_name}"""
- )
+ path = "domainbl"
+ parameter = "host"
elif self.observable_classification == self.ObservableTypes.IP.value:
- url = (
- self.url
- + f"""/iprep/v1/pay-as-you-go/
- ?key={self._api_key}
- &ip={self.observable_name}"""
- )
+ path = "iprep"
+ parameter = "ip"
elif self.observable_classification == self.ObservableTypes.URL.value:
- url = (
- self.url
- + f"""/urlrep/v1/pay-as-you-go/
- ?key={self._api_key}
- &url={self.observable_name}"""
- )
- r = requests.get(url)
+ path = "urlrep"
+ parameter = "url"
+ else:
+ raise AnalyzerConfigurationException("not supported")
+ complete_url = f"{self.url}/{path}/v1/pay-as-you-go/?key={self._api_key}&{parameter}={self.observable_name}"
+ r = requests.get(complete_url)
r.raise_for_status()
return r.json()
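The refactor above collapses three near-identical f-strings into one URL template. A sketch of the resulting composition, with a placeholder base URL and key:

```python
BASE_URL = "https://endpoint.apivoid.com"  # placeholder; the real value comes from the analyzer's `url` attribute


def build_apivoid_url(path: str, parameter: str, api_key: str, observable: str) -> str:
    # path/parameter pairs mirror the analyzer:
    # domain -> ("domainbl", "host"), ip -> ("iprep", "ip"), url -> ("urlrep", "url")
    return f"{BASE_URL}/{path}/v1/pay-as-you-go/?key={api_key}&{parameter}={observable}"


print(build_apivoid_url("iprep", "ip", "MY_KEY", "1.2.3.4"))
# https://endpoint.apivoid.com/iprep/v1/pay-as-you-go/?key=MY_KEY&ip=1.2.3.4
```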
diff --git a/api_app/analyzers_manager/observable_analyzers/basic_observable_analyzer.py b/api_app/analyzers_manager/observable_analyzers/basic_observable_analyzer.py
index 3b938790d1..30ad4be730 100644
--- a/api_app/analyzers_manager/observable_analyzers/basic_observable_analyzer.py
+++ b/api_app/analyzers_manager/observable_analyzers/basic_observable_analyzer.py
@@ -1,3 +1,5 @@
+# This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl
+# See the file 'LICENSE' for copying permission.
import base64
import logging
from tempfile import NamedTemporaryFile
diff --git a/api_app/analyzers_manager/observable_analyzers/dns/dns_malicious_detectors/ultradns_malicious_detector.py b/api_app/analyzers_manager/observable_analyzers/dns/dns_malicious_detectors/ultradns_malicious_detector.py
new file mode 100644
index 0000000000..e1c3dc9155
--- /dev/null
+++ b/api_app/analyzers_manager/observable_analyzers/dns/dns_malicious_detectors/ultradns_malicious_detector.py
@@ -0,0 +1,57 @@
+import ipaddress
+from urllib.parse import urlparse
+
+import dns.resolver
+
+from api_app.analyzers_manager import classes
+from api_app.analyzers_manager.exceptions import AnalyzerRunException
+
+from ..dns_responses import malicious_detector_response
+
+
+class UltraDNSMaliciousDetector(classes.ObservableAnalyzer):
+ """Resolve a DNS query with UltraDNS servers,
+ if the response falls within the sinkhole range, the domain is malicious.
+ """
+
+ def update(self) -> bool:
+ pass
+
+ def run(self):
+ is_malicious = False
+ observable = self.observable_name
+
+ # for URLs we check the corresponding domain
+ if self.observable_classification == self.ObservableTypes.URL:
+ observable = urlparse(self.observable_name).hostname
+
+ # Configure resolver with both nameservers
+ resolver = dns.resolver.Resolver()
+ resolver.nameservers = ["156.154.70.2", "156.154.71.2"]
+ resolver.timeout = 10 # Time per server
+ resolver.lifetime = 20 # Total time for all attempts
+
+ sinkhole_range = ipaddress.ip_network("156.154.112.0/23")
+
+ try:
+ answers = resolver.resolve(observable, "A")
+ for rdata in answers:
+ resolution = rdata.to_text()
+ # Check if the resolution falls in the sinkhole range
+ if ipaddress.ip_address(resolution) in sinkhole_range:
+ is_malicious = True
+ break
+
+ except dns.exception.Timeout:
+ raise AnalyzerRunException(
+ "Connection to UltraDNS failed - both servers timed out"
+ )
+ except Exception as e:
+ raise Exception(f"DNS query failed: {e}")
+
+ return malicious_detector_response(self.observable_name, is_malicious)
+
+ @classmethod
+ def _monkeypatch(cls):
+ patches = []
+ return super()._monkeypatch(patches=patches)
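A standalone sketch of the sinkhole check above: the nameservers and the 156.154.112.0/23 range come from the new analyzer, while the helper itself is illustrative (it assumes `dnspython` is installed):

```python
import ipaddress

import dns.resolver

SINKHOLE_RANGE = ipaddress.ip_network("156.154.112.0/23")


def is_sinkholed(domain: str) -> bool:
    resolver = dns.resolver.Resolver()
    resolver.nameservers = ["156.154.70.2", "156.154.71.2"]  # UltraDNS protective resolvers
    resolver.timeout = 10
    resolver.lifetime = 20
    answers = resolver.resolve(domain, "A")
    # an answer inside the sinkhole range means UltraDNS blocked the domain
    return any(
        ipaddress.ip_address(rdata.to_text()) in SINKHOLE_RANGE for rdata in answers
    )
```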
diff --git a/api_app/analyzers_manager/observable_analyzers/dns/dns_resolvers/ultradns_dns_resolver.py b/api_app/analyzers_manager/observable_analyzers/dns/dns_resolvers/ultradns_dns_resolver.py
new file mode 100644
index 0000000000..40dccb79cd
--- /dev/null
+++ b/api_app/analyzers_manager/observable_analyzers/dns/dns_resolvers/ultradns_dns_resolver.py
@@ -0,0 +1,64 @@
+# This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl
+# See the file 'LICENSE' for copying permission.
+
+"""UltraDNS resolver implementation"""
+
+import logging
+from urllib.parse import urlparse
+
+import dns.resolver
+
+from api_app.analyzers_manager import classes
+
+from ..dns_responses import dns_resolver_response
+
+logger = logging.getLogger(__name__)
+
+
+class UltraDNSDNSResolver(classes.ObservableAnalyzer):
+ """Resolve a DNS query with UltraDNS servers"""
+
+ query_type: str
+
+ def update(self) -> bool:
+ pass
+
+ def run(self):
+
+ resolutions = []
+ observable = self.observable_name
+ if self.observable_classification == self.ObservableTypes.URL:
+ observable = urlparse(self.observable_name).hostname
+ resolver = dns.resolver.Resolver()
+
+ # Configure UltraDNS servers
+ resolver.nameservers = ["64.6.64.6", "64.6.65.6"]
+ resolver.timeout = 10
+ resolver.lifetime = 20
+
+ try:
+ dns_resolutions = resolver.resolve(observable, self.query_type)
+ for resolution in dns_resolutions:
+ element = {
+ "TTL": dns_resolutions.rrset.ttl,
+ "data": resolution.to_text(),
+ "name": dns_resolutions.qname.to_text(),
+ "type": dns_resolutions.rdtype,
+ }
+ resolutions.append(element)
+ except (
+ dns.resolver.NXDOMAIN,
+ dns.resolver.NoAnswer,
+ dns.resolver.NoNameservers,
+ ):
+ logger.info(
+ "No resolution for "
+ f"{self.observable_classification} {self.observable_name}"
+ )
+
+ return dns_resolver_response(self.observable_name, resolutions)
+
+ @classmethod
+ def _monkeypatch(cls):
+ patches = []
+ return super()._monkeypatch(patches=patches)
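For comparison, the resolver's per-record output shape can be reproduced with plain `dnspython`; a sketch under the same nameserver and timeout settings as the analyzer above:

```python
import dns.resolver


def resolve_records(domain: str, query_type: str = "A") -> list:
    resolver = dns.resolver.Resolver()
    resolver.nameservers = ["64.6.64.6", "64.6.65.6"]  # UltraDNS public resolvers
    resolver.timeout = 10
    resolver.lifetime = 20
    answers = resolver.resolve(domain, query_type)
    # each answer becomes a dict with TTL, data, name and type,
    # matching the element built by the analyzer
    return [
        {
            "TTL": answers.rrset.ttl,
            "data": rdata.to_text(),
            "name": answers.qname.to_text(),
            "type": answers.rdtype,
        }
        for rdata in answers
    ]
```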
diff --git a/api_app/analyzers_manager/observable_analyzers/feodo_tracker.py b/api_app/analyzers_manager/observable_analyzers/feodo_tracker.py
index c7369f2d45..99a5a016c3 100644
--- a/api_app/analyzers_manager/observable_analyzers/feodo_tracker.py
+++ b/api_app/analyzers_manager/observable_analyzers/feodo_tracker.py
@@ -11,12 +11,14 @@
from api_app.analyzers_manager import classes
from api_app.analyzers_manager.exceptions import AnalyzerRunException
+from api_app.mixins import AbuseCHMixin
+from api_app.models import PluginConfig
from tests.mock_utils import MockUpResponse, if_mock_connections, patch
logger = logging.getLogger(__name__)
-class Feodo_Tracker(classes.ObservableAnalyzer):
+class Feodo_Tracker(AbuseCHMixin, classes.ObservableAnalyzer):
"""
Feodo Tracker offers various blocklists,
helping network owners to protect their
@@ -65,6 +67,22 @@ def run(self):
raise AnalyzerRunException(f"Key error in run: {e}")
return result
+ # this is necessary because during the "update()" flow the config()
+ # method is not called and the attributes would not be accessible by "cls"
+ @classmethod
+ def get_service_auth_headers(cls) -> dict:
+ for plugin in PluginConfig.objects.filter(
+ parameter__python_module=cls.python_module,
+ parameter__is_secret=True,
+ parameter__name="service_api_key",
+ ):
+ if plugin.value:
+ logger.debug("Found auth key for feodo tracker update")
+ return {"Auth-Key": plugin.value}
+
+ logger.debug("Not found auth key for feodo tracker update")
+ return {}
+
@classmethod
def update(cls) -> bool:
"""
@@ -74,7 +92,7 @@ def update(cls) -> bool:
logger.info(f"starting download of db from {db_url}")
try:
- r = requests.get(db_url)
+ r = requests.get(db_url, headers=cls.get_service_auth_headers())
r.raise_for_status()
except requests.RequestException:
return False
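The abuse.ch changes in this diff all follow the same pattern: an optional `service_api_key` secret that, when set, is sent as an `Auth-Key` header. A hedged sketch of that request pattern (the header name is taken from `get_service_auth_headers` above; the helper itself is illustrative):

```python
from typing import Optional

import requests


def abuse_ch_request(url: str, api_key: Optional[str] = None, **kwargs) -> requests.Response:
    # when no key is configured the header dict stays empty and the
    # request falls back to unauthenticated access
    headers = {"Auth-Key": api_key} if api_key else {}
    response = requests.post(url, headers=headers, timeout=30, **kwargs)
    response.raise_for_status()
    return response
```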
diff --git a/api_app/analyzers_manager/observable_analyzers/mb_get.py b/api_app/analyzers_manager/observable_analyzers/mb_get.py
index 8d9dc5d758..fbd1c37de5 100644
--- a/api_app/analyzers_manager/observable_analyzers/mb_get.py
+++ b/api_app/analyzers_manager/observable_analyzers/mb_get.py
@@ -1,27 +1,40 @@
# This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl
# See the file 'LICENSE' for copying permission.
+import logging
import requests
from api_app.analyzers_manager import classes
+from api_app.mixins import AbuseCHMixin
from tests.mock_utils import MockUpResponse, if_mock_connections, patch
+logger = logging.getLogger(__name__)
-class MB_GET(classes.ObservableAnalyzer):
+
+class MB_GET(AbuseCHMixin, classes.ObservableAnalyzer):
url: str = "https://mb-api.abuse.ch/api/v1/"
sample_url: str = "https://bazaar.abuse.ch/sample/"
+ def update(self) -> bool:
+ pass
+
def run(self):
- return self.query_mb_api(observable_name=self.observable_name)
+ return self.query_mb_api(
+ observable_name=self.observable_name,
+ headers=self.authentication_header,
+ )
@classmethod
- def query_mb_api(cls, observable_name: str) -> dict:
+ def query_mb_api(cls, observable_name: str, headers: dict = None) -> dict:
"""
This is in a ``classmethod`` so it can be reused in ``MB_GOOGLE``.
"""
post_data = {"query": "get_info", "hash": observable_name}
- response = requests.post(cls.url, data=post_data)
+ if headers is None:
+ headers = {}
+
+ response = requests.post(cls.url, data=post_data, headers=headers)
response.raise_for_status()
result = response.json()
diff --git a/api_app/analyzers_manager/observable_analyzers/mb_google.py b/api_app/analyzers_manager/observable_analyzers/mb_google.py
index de785d2d41..71cbfc9ef4 100644
--- a/api_app/analyzers_manager/observable_analyzers/mb_google.py
+++ b/api_app/analyzers_manager/observable_analyzers/mb_google.py
@@ -11,13 +11,19 @@ class MB_GOOGLE(MB_GET):
This is a modified version of MB_GET.
"""
+ def update(self) -> bool:
+ pass
+
def run(self):
results = {}
query = f"{self.observable_name} site:bazaar.abuse.ch"
for url in googlesearch.search(query, stop=20):
mb_hash = url.split("/")[-2]
- res = super().query_mb_api(observable_name=mb_hash)
+ res = super().query_mb_api(
+ observable_name=mb_hash,
+ headers=self.authentication_header,
+ )
results[mb_hash] = res
return results
diff --git a/api_app/analyzers_manager/observable_analyzers/nuclei.py b/api_app/analyzers_manager/observable_analyzers/nuclei.py
new file mode 100644
index 0000000000..1fc6b780ed
--- /dev/null
+++ b/api_app/analyzers_manager/observable_analyzers/nuclei.py
@@ -0,0 +1,60 @@
+# This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl
+# See the file 'LICENSE' for copying permission.
+import logging
+
+from api_app.analyzers_manager.classes import DockerBasedAnalyzer, ObservableAnalyzer
+
+logger = logging.getLogger(__name__)
+
+
+class NucleiAnalyzer(ObservableAnalyzer, DockerBasedAnalyzer):
+ url: str = "http://nuclei_analyzer:4008/run-nuclei"
+ template_dirs: list
+ max_tries: int = 40
+ poll_distance: int = 30
+
+ @classmethod
+ def update(cls) -> bool:
+ pass
+
+ def run(self):
+ """
+ Prepares and executes a Nuclei scan through the Docker-based API.
+ """
+ VALID_TEMPLATE_CATEGORIES = {
+ "cloud",
+ "code",
+ "cves",
+ "vulnerabilities",
+ "dns",
+ "file",
+ "headless",
+ "helpers",
+ "http",
+ "javascript",
+ "network",
+ "passive",
+ "profiles",
+ "ssl",
+ "workflows",
+ "exposures",
+ }
+
+ args = [self.observable_name]
+
+ # Append valid template directories with the "-t" flag
+ for template_dir in self.template_dirs:
+ if template_dir in VALID_TEMPLATE_CATEGORIES:
+ args.extend(["-t", template_dir])
+ else:
+ warning = f"Skipping invalid template directory: {template_dir} for observable {self.observable_name}"
+ logger.warning(warning)
+ self.report.errors.append(warning)
+ req_data = {"args": args}
+
+ # Execute the request
+ response = self._docker_run(req_data=req_data, req_files=None)
+
+ analysis = response.get("data", [])
+
+ return analysis
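The analyzer delegates the scan to a companion container and ships nuclei CLI-style arguments as JSON. A sketch of the request `run()` effectively issues, assuming `_docker_run` performs a JSON POST against the class's `url` and polls until the report is ready (the `max_tries`/`poll_distance` attributes suggest polling); the target and template choices are illustrative:

```python
import requests

# mirrors the payload built in run(): the observable plus "-t" template flags
req_data = {"args": ["https://example.com", "-t", "cves", "-t", "vulnerabilities"]}
resp = requests.post("http://nuclei_analyzer:4008/run-nuclei", json=req_data)
resp.raise_for_status()
findings = resp.json().get("data", [])  # list of nuclei findings
```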
diff --git a/api_app/analyzers_manager/observable_analyzers/threatfox.py b/api_app/analyzers_manager/observable_analyzers/threatfox.py
index 512a1e6960..c5af095ce9 100644
--- a/api_app/analyzers_manager/observable_analyzers/threatfox.py
+++ b/api_app/analyzers_manager/observable_analyzers/threatfox.py
@@ -2,14 +2,18 @@
# See the file 'LICENSE' for copying permission.
import json
+import logging
import requests
from api_app.analyzers_manager import classes
+from api_app.mixins import AbuseCHMixin
from tests.mock_utils import MockUpResponse, if_mock_connections, patch
+logger = logging.getLogger(__name__)
-class ThreatFox(classes.ObservableAnalyzer):
+
+class ThreatFox(AbuseCHMixin, classes.ObservableAnalyzer):
url: str = "https://threatfox-api.abuse.ch/api/v1/"
disable: bool = False # optional
@@ -22,7 +26,11 @@ def run(self):
payload = {"query": "search_ioc", "search_term": self.observable_name}
- response = requests.post(self.url, data=json.dumps(payload))
+ response = requests.post(
+ self.url,
+ data=json.dumps(payload),
+ headers=self.authentication_header,
+ )
response.raise_for_status()
result = response.json()
diff --git a/api_app/analyzers_manager/observable_analyzers/thug_url.py b/api_app/analyzers_manager/observable_analyzers/thug_url.py
index 3e64b56c1e..aa04ea743c 100644
--- a/api_app/analyzers_manager/observable_analyzers/thug_url.py
+++ b/api_app/analyzers_manager/observable_analyzers/thug_url.py
@@ -8,7 +8,7 @@
class ThugUrl(ObservableAnalyzer, DockerBasedAnalyzer):
name: str = "Thug"
- url: str = "http://malware_tools_analyzers:4002/thug"
+ url: str = "http://thug:4002/thug"
# http request polling max number of tries
max_tries: int = 15
# interval between http request polling (in seconds)
@@ -23,6 +23,11 @@ class ThugUrl(ObservableAnalyzer, DockerBasedAnalyzer):
def _thug_args_builder(self):
user_agent = self.user_agent
+ if not user_agent:
+ user_agent = (
+ "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
+ "(KHTML, like Gecko) Chrome/133.0.0.0 Safari/537.36 Edg/131.0.2903.86"
+ )
dom_events = self.dom_events
use_proxy = self.use_proxy
proxy = self.proxy
@@ -48,6 +53,7 @@ def run(self):
tmp_dir = secrets.token_hex(4)
tmp_dir_full_path = "/opt/deploy/thug" + tmp_dir
# make request data
+ # the option -n is bugged and does not work https://github.com/intelowlproject/IntelOwl/issues/2656
args.extend(["-n", tmp_dir_full_path, self.observable_name])
req_data = {
diff --git a/api_app/analyzers_manager/observable_analyzers/urlhaus.py b/api_app/analyzers_manager/observable_analyzers/urlhaus.py
index 94d79566cf..97fd172313 100644
--- a/api_app/analyzers_manager/observable_analyzers/urlhaus.py
+++ b/api_app/analyzers_manager/observable_analyzers/urlhaus.py
@@ -1,14 +1,18 @@
# This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl
# See the file 'LICENSE' for copying permission.
+import logging
import requests
-from api_app.analyzers_manager import classes
+from api_app.analyzers_manager.classes import ObservableAnalyzer
from api_app.analyzers_manager.exceptions import AnalyzerRunException
+from api_app.mixins import AbuseCHMixin
from tests.mock_utils import MockUpResponse, if_mock_connections, patch
+logger = logging.getLogger(__name__)
-class URLHaus(classes.ObservableAnalyzer):
+
+class URLHaus(AbuseCHMixin, ObservableAnalyzer):
url = "https://urlhaus-api.abuse.ch/v1/"
disable: bool = False # optional
@@ -34,7 +38,11 @@ def run(self):
f"not supported observable type {self.observable_classification}."
)
- response = requests.post(self.url + uri, data=post_data, headers=headers)
+ response = requests.post(
+ self.url + uri,
+ data=post_data,
+ headers=self.authentication_header | headers,
+ )
response.raise_for_status()
return response.json()
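Note the `self.authentication_header | headers` expression: `|` is the dict-merge operator (Python 3.9+), and keys from the right-hand operand win on conflict, so any endpoint-specific header would override the shared one. A quick illustration:

```python
auth = {"Auth-Key": "secret"}
extra = {"Accept": "application/json"}
merged = auth | extra
# {"Auth-Key": "secret", "Accept": "application/json"}
```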
diff --git a/api_app/analyzers_manager/observable_analyzers/wad.py b/api_app/analyzers_manager/observable_analyzers/wad.py
new file mode 100644
index 0000000000..7fbbe065f8
--- /dev/null
+++ b/api_app/analyzers_manager/observable_analyzers/wad.py
@@ -0,0 +1,55 @@
+# This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl
+# See the file 'LICENSE' for copying permission.
+
+import logging
+
+from wad.detection import Detector
+
+from api_app.analyzers_manager import classes
+from api_app.analyzers_manager.exceptions import AnalyzerRunException
+from tests.mock_utils import if_mock_connections, patch
+
+logger = logging.getLogger(__name__)
+
+
+class WAD(classes.ObservableAnalyzer):
+ """
+ This analyzer is a wrapper for the WAD (Web Application Detector) project.
+ """
+
+ @classmethod
+ def update(cls) -> bool:
+ pass
+
+ def run(self):
+ logger.info(f"Running WAD Analyzer for {self.observable_name}")
+
+ detector = Detector()
+
+ results = detector.detect(url=self.observable_name)
+
+ if results:
+ return results
+ else:
+ raise AnalyzerRunException("no results returned for the provided url")
+
+ @classmethod
+ def _monkeypatch(cls):
+ patches = [
+ if_mock_connections(
+ patch.object(
+ Detector,
+ "detect",
+ return_value={
+ "https://www.google.com/": [
+ {
+ "app": "Google Web Server",
+ "ver": "null",
+ "type": "Web Servers",
+ }
+ ]
+ },
+ ),
+ )
+ ]
+ return super()._monkeypatch(patches=patches)
diff --git a/api_app/analyzers_manager/observable_analyzers/yaraify.py b/api_app/analyzers_manager/observable_analyzers/yaraify.py
index 7a0f0bbce4..a1191d8ec7 100644
--- a/api_app/analyzers_manager/observable_analyzers/yaraify.py
+++ b/api_app/analyzers_manager/observable_analyzers/yaraify.py
@@ -1,19 +1,26 @@
# This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl
# See the file 'LICENSE' for copying permission.
+import logging
import requests
from api_app.analyzers_manager.classes import ObservableAnalyzer
+from api_app.mixins import AbuseCHMixin
from tests.mock_utils import MockUpResponse, if_mock_connections, patch
+logger = logging.getLogger(__name__)
-class YARAify(ObservableAnalyzer):
+
+class YARAify(AbuseCHMixin, ObservableAnalyzer):
url: str = "https://yaraify-api.abuse.ch/api/v1/"
query: str
result_max: int
_api_key_name: str
+ def update(self) -> bool:
+ pass
+
def run(self):
data = {"search_term": self.observable_name, "query": self.query}
@@ -23,7 +30,9 @@ def run(self):
if getattr(self, "_api_key_name", None):
data["malpedia-token"] = self._api_key_name
- response = requests.post(self.url, json=data)
+ response = requests.post(
+ self.url, json=data, headers=self.authentication_header
+ )
response.raise_for_status()
result = response.json()
diff --git a/api_app/ingestors_manager/ingestors/greedybear.py b/api_app/ingestors_manager/ingestors/greedybear.py
new file mode 100644
index 0000000000..5eced50fa1
--- /dev/null
+++ b/api_app/ingestors_manager/ingestors/greedybear.py
@@ -0,0 +1,100 @@
+import ipaddress
+import logging
+from typing import Any, Iterable
+from unittest.mock import patch
+
+import requests
+
+from api_app.ingestors_manager.classes import Ingestor
+from api_app.ingestors_manager.exceptions import (
+ IngestorConfigurationException,
+ IngestorRunException,
+)
+from tests.mock_utils import MockUpResponse, if_mock_connections
+
+logger = logging.getLogger(__name__)
+
+
+class GreedyBear(Ingestor):
+
+ url: str
+ feed_type: str
+ attack_type: str
+ age: str
+
+ VALID_FEED_TYPES = {"log4j", "cowrie", "all"}
+ VALID_ATTACK_TYPES = {"scanner", "payload_request", "all"}
+ VALID_AGE = {"recent", "persistent"}
+
+ @classmethod
+ def update(cls) -> bool:
+ pass
+
+ def run(self) -> Iterable[Any]:
+ if self.feed_type not in self.VALID_FEED_TYPES:
+ raise IngestorConfigurationException(
+ f"Invalid feed_type: {self.feed_type}. Must be one of {self.VALID_FEED_TYPES}"
+ )
+ if self.attack_type not in self.VALID_ATTACK_TYPES:
+ raise IngestorConfigurationException(
+ f"Invalid attack_type: {self.attack_type}. Must be one of {self.VALID_ATTACK_TYPES}"
+ )
+ if self.age not in self.VALID_AGE:
+ raise IngestorConfigurationException(
+ f"Invalid age: {self.age}. Must be one of {self.VALID_AGE}"
+ )
+
+ req_url = (
+ f"{self.url}/api/feeds/{self.feed_type}/{self.attack_type}/{self.age}.json"
+ )
+ result = requests.get(req_url)
+ result.raise_for_status()
+ content = result.json()
+ if not isinstance(content.get("iocs"), list):
+ raise IngestorRunException(f"Content {content} not expected")
+
+ limit = min(len(content["iocs"]), self.limit)
+ for elem in content["iocs"][:limit]:
+ value = elem.get("value")
+ try:
+ ipaddress.ip_address(value)
+ yield value
+ except ValueError:
+ pass
+
+ @classmethod
+ def _monkeypatch(cls):
+ patches = [
+ if_mock_connections(
+ patch(
+ "requests.get",
+ return_value=MockUpResponse(
+ {
+ "license": "https://github.com/honeynet/GreedyBear/blob/main/FEEDS_LICENSE.md",
+ "iocs": [
+ {
+ "feed_type": "suricata",
+ "value": "91.205.219.185",
+ "scanner": True,
+ "payload_request": False,
+ "first_seen": "2024-05-29",
+ "last_seen": "2025-02-01",
+ "times_seen": 6437,
+ },
+ {
+ "feed_type": "suricata",
+ "value": "88.210.32.15",
+ "scanner": True,
+ "payload_request": False,
+ "first_seen": "2024-07-30",
+ "last_seen": "2025-02-01",
+ "times_seen": 61,
+ },
+ ],
+ },
+ 200,
+ ),
+ ),
+ )
+ ]
+ return super()._monkeypatch(patches=patches)
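The ingestor builds its feed URL as `{url}/api/feeds/{feed_type}/{attack_type}/{age}.json`, then yields only the IoCs that parse as IP addresses. A standalone sketch of the same flow; the base URL matches the default configured in the migration further down, and the feed/attack/age values are illustrative:

```python
import ipaddress

import requests

url = "https://greedybear.honeynet.org/api/feeds/all/all/recent.json"
resp = requests.get(url)
resp.raise_for_status()
for elem in resp.json().get("iocs", [])[:20]:  # cap results, like `limit`
    try:
        ipaddress.ip_address(elem.get("value"))  # raises ValueError if not an IP
        print(elem["value"])
    except ValueError:
        pass  # skip IoCs that are not plain IP addresses
```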
diff --git a/api_app/ingestors_manager/ingestors/malware_bazaar.py b/api_app/ingestors_manager/ingestors/malware_bazaar.py
index ddd6364b79..951586f22c 100644
--- a/api_app/ingestors_manager/ingestors/malware_bazaar.py
+++ b/api_app/ingestors_manager/ingestors/malware_bazaar.py
@@ -10,18 +10,21 @@
from api_app.ingestors_manager.classes import Ingestor
from api_app.ingestors_manager.exceptions import IngestorRunException
+from api_app.mixins import AbuseCHMixin
from tests.mock_utils import MockUpResponse, if_mock_connections
logger = logging.getLogger(__name__)
-class MalwareBazaar(Ingestor):
+class MalwareBazaar(AbuseCHMixin, Ingestor):
# API endpoint
url: str
# Download samples that are up to X hours old
hours: int
# Download samples from chosen signatures (aka malware families)
signatures: str
+ # Max number of results to retrieve
+ limit: int
@classmethod
def update(cls) -> bool:
@@ -31,7 +34,8 @@ def update(cls) -> bool:
def get_signature_information(self, signature):
result = requests.post(
self.url,
- data={"query": "get_siginfo", "signature": signature, "limit": 100},
+ data={"query": "get_siginfo", "signature": signature, "limit": self.limit},
+ headers=self.authentication_header,
timeout=30,
)
result.raise_for_status()
@@ -51,6 +55,12 @@ def get_recent_samples(self):
current_time = timezone.now()
for signature in self.signatures:
data = self.get_signature_information(signature)
+ hours_str = "hour" if self.hours == 1 else "hours"
+ if len(data) >= self.limit:
+ logger.info(
+ f"{signature}: in the last {self.hours} {hours_str} there are "
+ f"more results than the limit: {len(data)}/{self.limit}"
+ )
for elem in data:
first_seen = timezone.make_aware(
timezone.datetime.strptime(elem["first_seen"], "%Y-%m-%d %H:%M:%S")
@@ -58,13 +68,6 @@ def get_recent_samples(self):
diff = int((current_time - first_seen).total_seconds()) // 3600
if elem["signature"] == signature and diff <= self.hours:
hashes.add(elem["sha256_hash"])
-
- last_hours_str = (
- "Last hour" if self.hours == 1 else f"Last {self.hours} hours"
- )
- logger.info(
- f"{last_hours_str} {signature} samples: {len(hashes)}/{len(data)}"
- )
return hashes
def download_sample(self, h):
@@ -75,6 +78,7 @@ def download_sample(self, h):
"query": "get_file",
"sha256_hash": h,
},
+ headers=self.authentication_header,
timeout=60,
)
sample_archive.raise_for_status()
diff --git a/api_app/ingestors_manager/ingestors/threatfox.py b/api_app/ingestors_manager/ingestors/threatfox.py
index 3c2f72d1e7..c27cc44cfe 100644
--- a/api_app/ingestors_manager/ingestors/threatfox.py
+++ b/api_app/ingestors_manager/ingestors/threatfox.py
@@ -6,12 +6,13 @@
from api_app.ingestors_manager.classes import Ingestor
from api_app.ingestors_manager.exceptions import IngestorRunException
+from api_app.mixins import AbuseCHMixin
from tests.mock_utils import MockUpResponse, if_mock_connections
logger = logging.getLogger(__name__)
-class ThreatFox(Ingestor):
+class ThreatFox(AbuseCHMixin, Ingestor):
# API endpoint
url = "https://threatfox-api.abuse.ch/api/v1/"
# Days to check. From 1 to 7
@@ -22,7 +23,11 @@ def update(cls) -> bool:
pass
def run(self) -> Iterable[Any]:
- result = requests.post(self.url, json={"query": "get_iocs", "days": self.days})
+ result = requests.post(
+ self.url,
+ json={"query": "get_iocs", "days": self.days},
+ headers=self.authentication_header,
+ )
result.raise_for_status()
content = result.json()
logger.info(f"ThreatFox data is {content}")
diff --git a/api_app/ingestors_manager/migrations/0026_alter_ingestor_config_malware_bazaar_threatfox.py b/api_app/ingestors_manager/migrations/0026_alter_ingestor_config_malware_bazaar_threatfox.py
new file mode 100644
index 0000000000..46247909a2
--- /dev/null
+++ b/api_app/ingestors_manager/migrations/0026_alter_ingestor_config_malware_bazaar_threatfox.py
@@ -0,0 +1,57 @@
+from django.db import migrations
+
+
+def migrate(apps, schema_editor):
+ Parameter = apps.get_model("api_app", "Parameter")
+ PythonModule = apps.get_model("api_app", "PythonModule")
+
+ ingestors = [
+ "malware_bazaar.MalwareBazaar",
+ "threatfox.ThreatFox",
+ ]
+ for ingestor in ingestors:
+ module = PythonModule.objects.get(
+ module=ingestor,
+ base_path="api_app.ingestors_manager.ingestors",
+ )
+ Parameter.objects.create(
+ name="service_api_key",
+ type="str",
+ description="Optional API key to connect to abuse.ch services.",
+ is_secret=True,
+ required=False,
+ python_module=module,
+ )
+
+
+def reverse_migrate(apps, schema_editor):
+ Parameter = apps.get_model("api_app", "Parameter")
+ PythonModule = apps.get_model("api_app", "PythonModule")
+
+ ingestors = [
+ "malware_bazaar.MalwareBazaar",
+ "threatfox.ThreatFox",
+ ]
+ for ingestor in ingestors:
+ module = PythonModule.objects.get(
+ module=ingestor,
+ base_path="api_app.ingestors_manager.ingestors",
+ )
+ Parameter.objects.get(
+ name="service_api_key",
+ type="str",
+ description="Optional API key to connect to abuse.ch services.",
+ is_secret=True,
+ required=False,
+ python_module=module,
+ ).delete()
+
+
+class Migration(migrations.Migration):
+ atomic = False
+ dependencies = [
+ ("api_app", "0065_job_mpnodesearch"),
+ ("ingestors_manager", "0025_ingestor_config_virustotal_example_query"),
+ ]
+
+ operations = [migrations.RunPython(migrate, reverse_migrate)]
diff --git a/api_app/ingestors_manager/migrations/0027_added_limit_parameter_malware_bazaar_threatfox.py b/api_app/ingestors_manager/migrations/0027_added_limit_parameter_malware_bazaar_threatfox.py
new file mode 100644
index 0000000000..23273a09a4
--- /dev/null
+++ b/api_app/ingestors_manager/migrations/0027_added_limit_parameter_malware_bazaar_threatfox.py
@@ -0,0 +1,80 @@
+from django.db import migrations
+
+
+def migrate(apps, schema_editor):
+ Parameter = apps.get_model("api_app", "Parameter")
+ PythonModule = apps.get_model("api_app", "PythonModule")
+ IngestorConfig = apps.get_model("ingestors_manager", "IngestorConfig")
+ PluginConfig = apps.get_model("api_app", "PluginConfig")
+
+ ingestors = [
+ "malware_bazaar.MalwareBazaar",
+ "threatfox.ThreatFox",
+ ]
+ for ingestor in ingestors:
+ module = PythonModule.objects.get(
+ module=ingestor,
+ base_path="api_app.ingestors_manager.ingestors",
+ )
+ p = Parameter.objects.create(
+ name="limit",
+ type="int",
+ description="Max number of results.",
+ is_secret=False,
+ required=True,
+ python_module=module,
+ )
+ p.full_clean()
+ p.save()
+
+ ic = IngestorConfig.objects.get(name=ingestor.split(".")[1])
+ pc = PluginConfig(
+ value=20,
+ ingestor_config=ic,
+ for_organization=False,
+ owner=None,
+ parameter=p,
+ )
+ pc.full_clean()
+ pc.save()
+
+
+def reverse_migrate(apps, schema_editor):
+ Parameter = apps.get_model("api_app", "Parameter")
+ PythonModule = apps.get_model("api_app", "PythonModule")
+ IngestorConfig = apps.get_model("ingestors_manager", "IngestorConfig")
+ PluginConfig = apps.get_model("api_app", "PluginConfig")
+
+ ingestors = [
+ "malware_bazaar.MalwareBazaar",
+ "threatfox.ThreatFox",
+ ]
+ for ingestor in ingestors:
+ module = PythonModule.objects.get(
+ module=ingestor,
+ base_path="api_app.ingestors_manager.ingestors",
+ )
+ ic = IngestorConfig.objects.get(name=ingestor.split(".")[1])
+ p = Parameter.objects.get(
+ name="limit",
+ type="int",
+ description="Max number of results.",
+ is_secret=False,
+ required=True,
+ python_module=module,
+ )
+ PluginConfig.objects.get(
+ parameter=p,
+ ingestor_config=ic,
+ ).delete()
+ p.delete()
+
+
+class Migration(migrations.Migration):
+ atomic = False
+ dependencies = [
+ ("api_app", "0065_job_mpnodesearch"),
+ ("ingestors_manager", "0026_alter_ingestor_config_malware_bazaar_threatfox"),
+ ]
+
+ operations = [migrations.RunPython(migrate, reverse_migrate)]
diff --git a/api_app/ingestors_manager/migrations/0028_ingestor_config_greedybear.py b/api_app/ingestors_manager/migrations/0028_ingestor_config_greedybear.py
new file mode 100644
index 0000000000..4d29004698
--- /dev/null
+++ b/api_app/ingestors_manager/migrations/0028_ingestor_config_greedybear.py
@@ -0,0 +1,332 @@
+from django.db import migrations
+from django.db.models.fields.related_descriptors import (
+ ForwardManyToOneDescriptor,
+ ForwardOneToOneDescriptor,
+ ManyToManyDescriptor,
+ ReverseManyToOneDescriptor,
+ ReverseOneToOneDescriptor,
+)
+
+plugin = {
+ "python_module": {
+ "health_check_schedule": None,
+ "update_schedule": {
+ "minute": "0",
+ "hour": "0",
+ "day_of_week": "*",
+ "day_of_month": "*",
+ "month_of_year": "*",
+ },
+ "module": "greedybear.GreedyBear",
+ "base_path": "api_app.ingestors_manager.ingestors",
+ },
+ "schedule": {
+ "minute": "0",
+ "hour": "0",
+ "day_of_week": "*",
+ "day_of_month": "*",
+ "month_of_year": "*",
+ },
+ "periodic_task": {
+ "crontab": {
+ "minute": "0",
+ "hour": "0",
+ "day_of_week": "*",
+ "day_of_month": "*",
+ "month_of_year": "*",
+ },
+ "name": "GreedyBearIngestor",
+ "task": "intel_owl.tasks.execute_ingestor",
+ "kwargs": '{"config_name": "GreedyBear"}',
+ "queue": "default",
+ "enabled": False,
+ },
+ "user": {
+ "username": "GreedyBearIngestor",
+ "profile": {
+ "user": {
+ "username": "GreedyBearIngestor",
+ "email": "",
+ "first_name": "",
+ "last_name": "",
+ "password": "",
+ "is_active": True,
+ },
+ "company_name": "",
+ "company_role": "",
+ "twitter_handle": "",
+ "discover_from": "other",
+ "task_priority": 7,
+ "is_robot": True,
+ },
+ },
+ "playbooks_choice": ["Popular_IP_Reputation_Services"],
+ "name": "GreedyBear",
+ "description": "Queries feeds which are generated by the [GreedyBear Project](https://intelowlproject.github.io/docs/GreedyBear/Introduction/).",
+ "disabled": True,
+ "soft_time_limit": 60,
+ "routing_key": "ingestor",
+ "health_check_status": True,
+ "maximum_jobs": 50,
+ "delay": "00:00:00",
+ "model": "ingestors_manager.IngestorConfig",
+}
+
+params = [
+ {
+ "python_module": {
+ "module": "greedybear.GreedyBear",
+ "base_path": "api_app.ingestors_manager.ingestors",
+ },
+ "name": "url",
+ "type": "str",
+ "description": "API endpoint",
+ "is_secret": False,
+ "required": False,
+ },
+ {
+ "python_module": {
+ "module": "greedybear.GreedyBear",
+ "base_path": "api_app.ingestors_manager.ingestors",
+ },
+ "name": "limit",
+ "type": "int",
+ "description": "Max number of results.",
+ "is_secret": False,
+ "required": False,
+ },
+ {
+ "python_module": {
+ "module": "greedybear.GreedyBear",
+ "base_path": "api_app.ingestors_manager.ingestors",
+ },
+ "name": "feed_type",
+ "type": "str",
+ "description": "The available feed types are log4j, cowrie, and all.",
+ "is_secret": False,
+ "required": False,
+ },
+ {
+ "python_module": {
+ "module": "greedybear.GreedyBear",
+ "base_path": "api_app.ingestors_manager.ingestors",
+ },
+ "name": "attack_type",
+ "type": "str",
+ "description": "The available attack_type are scanner, payload_request, and all.",
+ "is_secret": False,
+ "required": False,
+ },
+ {
+ "python_module": {
+ "module": "greedybear.GreedyBear",
+ "base_path": "api_app.ingestors_manager.ingestors",
+ },
+ "name": "age",
+ "type": "str",
+ "description": "The available age are recent and persistent.",
+ "is_secret": False,
+ "required": False,
+ },
+]
+
+values = [
+ {
+ "parameter": {
+ "python_module": {
+ "module": "greedybear.GreedyBear",
+ "base_path": "api_app.ingestors_manager.ingestors",
+ },
+ "name": "url",
+ "type": "str",
+ "description": "API endpoint",
+ "is_secret": False,
+ "required": False,
+ },
+ "analyzer_config": None,
+ "connector_config": None,
+ "visualizer_config": None,
+ "ingestor_config": "GreedyBear",
+ "pivot_config": None,
+ "for_organization": False,
+ "value": "https://greedybear.honeynet.org",
+ "updated_at": "2025-02-10T12:56:17.294680Z",
+ "owner": None,
+ },
+ {
+ "parameter": {
+ "python_module": {
+ "module": "greedybear.GreedyBear",
+ "base_path": "api_app.ingestors_manager.ingestors",
+ },
+ "name": "limit",
+ "type": "int",
+ "description": "Max number of results.",
+ "is_secret": False,
+ "required": False,
+ },
+ "analyzer_config": None,
+ "connector_config": None,
+ "visualizer_config": None,
+ "ingestor_config": "GreedyBear",
+ "pivot_config": None,
+ "for_organization": False,
+ "value": 50,
+ "updated_at": "2025-02-10T12:56:17.302177Z",
+ "owner": None,
+ },
+ {
+ "parameter": {
+ "python_module": {
+ "module": "greedybear.GreedyBear",
+ "base_path": "api_app.ingestors_manager.ingestors",
+ },
+ "name": "feed_type",
+ "type": "str",
+ "description": "The available feed types are log4j, cowrie, and all.",
+ "is_secret": False,
+ "required": False,
+ },
+ "analyzer_config": None,
+ "connector_config": None,
+ "visualizer_config": None,
+ "ingestor_config": "GreedyBear",
+ "pivot_config": None,
+ "for_organization": False,
+ "value": "all",
+ "updated_at": "2025-02-10T12:56:17.309549Z",
+ "owner": None,
+ },
+ {
+ "parameter": {
+ "python_module": {
+ "module": "greedybear.GreedyBear",
+ "base_path": "api_app.ingestors_manager.ingestors",
+ },
+ "name": "attack_type",
+ "type": "str",
+ "description": "The available attack_type are scanner, payload_request, and all.",
+ "is_secret": False,
+ "required": False,
+ },
+ "analyzer_config": None,
+ "connector_config": None,
+ "visualizer_config": None,
+ "ingestor_config": "GreedyBear",
+ "pivot_config": None,
+ "for_organization": False,
+ "value": "all",
+ "updated_at": "2025-02-10T12:56:17.316766Z",
+ "owner": None,
+ },
+ {
+ "parameter": {
+ "python_module": {
+ "module": "greedybear.GreedyBear",
+ "base_path": "api_app.ingestors_manager.ingestors",
+ },
+ "name": "age",
+ "type": "str",
+ "description": "The available age are recent and persistent.",
+ "is_secret": False,
+ "required": False,
+ },
+ "analyzer_config": None,
+ "connector_config": None,
+ "visualizer_config": None,
+ "ingestor_config": "GreedyBear",
+ "pivot_config": None,
+ "for_organization": False,
+ "value": "recent",
+ "updated_at": "2025-02-10T12:56:17.324439Z",
+ "owner": None,
+ },
+]
+
+
+def _get_real_obj(Model, field, value):
+ def _get_obj(Model, other_model, value):
+ if isinstance(value, dict):
+ real_vals = {}
+ for key, real_val in value.items():
+ real_vals[key] = _get_real_obj(other_model, key, real_val)
+ value = other_model.objects.get_or_create(**real_vals)[0]
+ # it is just the primary key serialized
+ else:
+ if isinstance(value, int):
+ if Model.__name__ == "PluginConfig":
+ value = other_model.objects.get(name=plugin["name"])
+ else:
+ value = other_model.objects.get(pk=value)
+ else:
+ value = other_model.objects.get(name=value)
+ return value
+
+ if (
+ type(getattr(Model, field))
+ in [
+ ForwardManyToOneDescriptor,
+ ReverseManyToOneDescriptor,
+ ReverseOneToOneDescriptor,
+ ForwardOneToOneDescriptor,
+ ]
+ and value
+ ):
+ other_model = getattr(Model, field).get_queryset().model
+ value = _get_obj(Model, other_model, value)
+ elif type(getattr(Model, field)) in [ManyToManyDescriptor] and value:
+ other_model = getattr(Model, field).rel.model
+ value = [_get_obj(Model, other_model, val) for val in value]
+ return value
+
+
+def _create_object(Model, data):
+ mtm, no_mtm = {}, {}
+ for field, value in data.items():
+ value = _get_real_obj(Model, field, value)
+ if type(getattr(Model, field)) is ManyToManyDescriptor:
+ mtm[field] = value
+ else:
+ no_mtm[field] = value
+ try:
+ o = Model.objects.get(**no_mtm)
+ except Model.DoesNotExist:
+ o = Model(**no_mtm)
+ o.full_clean()
+ o.save()
+ for field, value in mtm.items():
+ attribute = getattr(o, field)
+ if value is not None:
+ attribute.set(value)
+ return False
+ return True
+
+
+def migrate(apps, schema_editor):
+ Parameter = apps.get_model("api_app", "Parameter")
+ PluginConfig = apps.get_model("api_app", "PluginConfig")
+ python_path = plugin.pop("model")
+ Model = apps.get_model(*python_path.split("."))
+ if not Model.objects.filter(name=plugin["name"]).exists():
+ exists = _create_object(Model, plugin)
+ if not exists:
+ for param in params:
+ _create_object(Parameter, param)
+ for value in values:
+ _create_object(PluginConfig, value)
+
+
+def reverse_migrate(apps, schema_editor):
+ python_path = plugin.pop("model")
+ Model = apps.get_model(*python_path.split("."))
+ Model.objects.get(name=plugin["name"]).delete()
+
+
+class Migration(migrations.Migration):
+ atomic = False
+ dependencies = [
+ ("api_app", "0065_job_mpnodesearch"),
+ ("ingestors_manager", "0027_added_limit_parameter_malware_bazaar_threatfox"),
+ ]
+
+ operations = [migrations.RunPython(migrate, reverse_migrate)]
diff --git a/api_app/mixins.py b/api_app/mixins.py
index fabe9e2092..96eb96f9cf 100644
--- a/api_app/mixins.py
+++ b/api_app/mixins.py
@@ -671,3 +671,16 @@ def _vt_get_report(
result["link"] = f"https://www.virustotal.com/gui/{uri_prefix}/{uri_postfix}"
return result
+
+
+class AbuseCHMixin:
+ # API key to access abuse.ch services
+ _service_api_key: str
+
+ @property
+ def authentication_header(self) -> dict:
+ if hasattr(self, "_service_api_key") and self._service_api_key:
+ logger.debug("Found auth key for abuse.ch request")
+ return {"Auth-Key": self._service_api_key}
+
+ return {}
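With the mixin in place, any abuse.ch analyzer picks up the optional `service_api_key` secret the same way: list the mixin first in the bases so the property resolves on the instance, then pass `self.authentication_header` to the request. A hypothetical analyzer as a sketch; the class and endpoint are illustrative, not part of this diff:

```python
import requests

from api_app.analyzers_manager.classes import ObservableAnalyzer
from api_app.mixins import AbuseCHMixin


class ExampleAbuseCH(AbuseCHMixin, ObservableAnalyzer):  # hypothetical analyzer
    url: str = "https://example-api.abuse.ch/api/v1/"  # illustrative endpoint

    def run(self):
        response = requests.post(
            self.url,
            json={"query": "search", "term": self.observable_name},
            headers=self.authentication_header,  # {} when no key is configured
        )
        response.raise_for_status()
        return response.json()
```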
diff --git a/api_app/pivots_manager/queryset.py b/api_app/pivots_manager/queryset.py
index 9ccf9ad1b9..b92d1967d4 100644
--- a/api_app/pivots_manager/queryset.py
+++ b/api_app/pivots_manager/queryset.py
@@ -20,7 +20,6 @@ def valid(
analyzers.values_list("pk", flat=True)
)
)
- | Q(related_analyzer_configs=None)
)
if connectors.exists():
qs = qs.many_to_many_to_array("related_connector_configs").filter(
@@ -29,7 +28,6 @@ def valid(
connectors.values_list("pk", flat=True)
)
)
- | Q(related_connector_configs=None)
)
return qs.distinct()
diff --git a/api_app/playbooks_manager/migrations/0058_add_ultradns_to_free_to_use_and_dns.py b/api_app/playbooks_manager/migrations/0058_add_ultradns_to_free_to_use_and_dns.py
new file mode 100644
index 0000000000..495c80cffd
--- /dev/null
+++ b/api_app/playbooks_manager/migrations/0058_add_ultradns_to_free_to_use_and_dns.py
@@ -0,0 +1,55 @@
+# This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl
+# See the file 'LICENSE' for copying permission.
+
+
+from django.db import migrations
+
+
+def migrate(apps, schema_editor):
+ playbook_config = apps.get_model("playbooks_manager", "PlaybookConfig")
+ AnalyzerConfig = apps.get_model("analyzers_manager", "AnalyzerConfig")
+ pc = playbook_config.objects.get(name="FREE_TO_USE_ANALYZERS")
+ pc2 = playbook_config.objects.get(name="Dns")
+ pc.analyzers.add(
+ AnalyzerConfig.objects.get(name="UltraDNS_DNS").id,
+ AnalyzerConfig.objects.get(name="UltraDNS_Malicious_Detector").id,
+ )
+ pc2.analyzers.add(
+ AnalyzerConfig.objects.get(name="UltraDNS_DNS").id,
+ AnalyzerConfig.objects.get(name="UltraDNS_Malicious_Detector").id,
+ )
+ pc.full_clean()
+ pc.save()
+ pc2.full_clean()
+ pc2.save()
+
+
+def reverse_migrate(apps, schema_editor):
+ playbook_config = apps.get_model("playbooks_manager", "PlaybookConfig")
+ AnalyzerConfig = apps.get_model("analyzers_manager", "AnalyzerConfig")
+ pc = playbook_config.objects.get(name="FREE_TO_USE_ANALYZERS")
+ pc2 = playbook_config.objects.get(name="Dns")
+
+ pc.analyzers.remove(
+ AnalyzerConfig.objects.get(name="UltraDNS_DNS").id,
+ AnalyzerConfig.objects.get(name="UltraDNS_Malicious_Detector").id,
+ )
+ pc.full_clean()
+ pc.save()
+ pc2.analyzers.remove(
+ AnalyzerConfig.objects.get(name="UltraDNS_DNS").id,
+ AnalyzerConfig.objects.get(name="UltraDNS_Malicious_Detector").id,
+ )
+ pc2.full_clean()
+ pc2.save()
+
+
+class Migration(migrations.Migration):
+ dependencies = [
+ ("playbooks_manager", "0057_alter_phishing_extractor_add_domain"),
+ ("analyzers_manager", "0145_analyzer_config_ultradns_malicious_detector"),
+ ]
+
+ operations = [
+ migrations.RunPython(migrate, reverse_migrate),
+ ]
diff --git a/api_app/playbooks_manager/queryset.py b/api_app/playbooks_manager/queryset.py
index 35305c9cae..58b854e86f 100644
--- a/api_app/playbooks_manager/queryset.py
+++ b/api_app/playbooks_manager/queryset.py
@@ -30,6 +30,7 @@ def _subquery_weight_org(user: User) -> Union[Subquery, Value]:
Job.objects.prefetch_related("user")
.filter(
user__membership__organization__pk=user.membership.organization.pk,
+ user__profile__is_robot=False,
playbook_to_execute=OuterRef("pk"),
finished_analysis_time__gte=now() - datetime.timedelta(days=30),
)
@@ -46,16 +47,17 @@ def _subquery_weight_other(user: User) -> Subquery:
Job.objects.filter(
playbook_to_execute=OuterRef("pk"),
finished_analysis_time__gte=now() - datetime.timedelta(days=30),
+ user__profile__is_robot=False,
)
.exclude(
- user__membership__organization__pk=user.membership.organization.pk
+ user__membership__organization__pk=user.membership.organization.pk,
)
.annotate(count=Func(F("pk"), function="Count"))
.values("count")
)
return Subquery(
Job.objects.prefetch_related("user")
- .filter(playbook_to_execute=OuterRef("pk"))
+ .filter(playbook_to_execute=OuterRef("pk"), user__profile__is_robot=False)
.exclude(user__pk=user.pk)
.annotate(count=Func(F("pk"), function="Count"))
.values("count")
diff --git a/api_app/serializers/__init__.py b/api_app/serializers/__init__.py
index 06042c238c..2b55a43256 100644
--- a/api_app/serializers/__init__.py
+++ b/api_app/serializers/__init__.py
@@ -6,6 +6,7 @@
from api_app.interfaces import OwnershipAbstractModel
from certego_saas.apps.organization.organization import Organization
from certego_saas.ext.upload.elastic import BISerializer
+from intel_owl.settings._util import get_environment
class AbstractBIInterface(BISerializer):
@@ -36,12 +37,9 @@ def get_class_instance(instance):
@staticmethod
def get_environment(instance):
- if settings.STAGE_PRODUCTION:
- return "prod"
- elif settings.STAGE_STAGING:
- return "stag"
- else:
- return "test"
+ # we cannot pass the function directly to the serializer's field,
+ # so we need a wrapper function that calls the real one
+ return get_environment()
@staticmethod
def get_index():
diff --git a/api_app/serializers/job.py b/api_app/serializers/job.py
index f6fe283441..9d9a34db3d 100644
--- a/api_app/serializers/job.py
+++ b/api_app/serializers/job.py
@@ -124,13 +124,13 @@ class Meta:
slug_field="name",
queryset=ConnectorConfig.objects.all(),
many=True,
- default=ConnectorConfig.objects.none(),
+ default=[],
)
analyzers_requested = rfs.SlugRelatedField(
slug_field="name",
queryset=AnalyzerConfig.objects.all(),
many=True,
- default=AnalyzerConfig.objects.none(),
+ default=[],
)
playbook_requested = rfs.SlugRelatedField(
slug_field="name",
@@ -927,6 +927,7 @@ def set_analyzers_to_execute(
observable_classification: str,
**kwargs,
) -> List[AnalyzerConfig]:
+ logger.debug(f"{analyzers_requested=} {type(analyzers_requested)=}")
analyzers_to_execute = analyzers_requested.copy()
partially_filtered_analyzers_qs = AnalyzerConfig.objects.filter(
diff --git a/api_app/views.py b/api_app/views.py
index e54a687b80..473a26d77c 100644
--- a/api_app/views.py
+++ b/api_app/views.py
@@ -39,6 +39,7 @@
from certego_saas.ext.viewsets import ReadAndDeleteOnlyViewSet
from intel_owl import tasks
from intel_owl.celery import app as celery_app
+from intel_owl.settings._util import get_environment
from .analyzers_manager.constants import ObservableTypes
from .choices import ObservableClassification
@@ -324,10 +325,12 @@ def analyze_multiple_observables(request):
- 200: JSON response with the job details for each initiated analysis.
"""
logger.info(f"received analyze_multiple_observables from user {request.user}")
+ logger.debug(f"{request.data=}")
oas = ObservableAnalysisSerializer(
data=request.data, many=True, context={"request": request}
)
oas.is_valid(raise_exception=True)
+ logger.debug(f"{oas.validated_data=}")
parent_job = oas.validated_data[0].get("parent_job", None)
jobs = oas.save(send_task=True, parent=parent_job)
jrs = JobResponseSerializer(jobs, many=True).data
@@ -936,7 +939,9 @@ def __aggregation_response_dynamic(
if len(most_frequent_values):
annotations = {
- val: Count(field_name, filter=Q(**{field_name: val}))
+ val.replace(" ", "")
+ .replace("?", "")
+ .replace(";", ""): Count(field_name, filter=Q(**{field_name: val}))
for val in most_frequent_values
}
logger.debug(f"request: {field_name} annotations: {annotations}")
@@ -1792,7 +1797,7 @@ def get(self, request):
# 3 return data
elastic_response = (
- Search(index="plugin-report-*")
+ Search(index=f"plugin-report-{get_environment()}*")
.query(QElastic("bool", filter=filter_list))
.extra(size=10000) # max allowed size
.execute()
diff --git a/api_app/visualizers_manager/classes.py b/api_app/visualizers_manager/classes.py
index 8931ad5f3b..c800c48afa 100644
--- a/api_app/visualizers_manager/classes.py
+++ b/api_app/visualizers_manager/classes.py
@@ -548,3 +548,10 @@ def pivots_reports(self) -> QuerySet:
from api_app.pivots_manager.models import PivotReport
return PivotReport.objects.filter(job=self._job)
+
+ def data_models(self) -> QuerySet:
+ from api_app.analyzers_manager.models import AnalyzerReport
+
+ data_model_class = AnalyzerReport.get_data_model_class(self._job)
+ analyzer_reports_pk = [report.pk for report in self.analyzer_reports()]
+ return data_model_class.objects.filter(analyzers_report__in=analyzer_reports_pk)
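`data_models()` resolves the data-model class for the job's observable type and returns every data model produced by the job's analyzer reports; the new `DataModel` visualizer below builds its whole page from this queryset. A sketch of consuming it from another visualizer, illustrative only:

```python
from api_app.visualizers_manager.classes import Visualizer


class ExampleDataModelConsumer(Visualizer):  # hypothetical visualizer
    def run(self):
        models = self.data_models()  # QuerySet of Domain/IP/File data models
        evaluated = [dm for dm in models if dm.evaluation]
        return [{"total": models.count(), "evaluated": len(evaluated)}]
```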
diff --git a/api_app/visualizers_manager/migrations/0040_visualizer_config_data_model.py b/api_app/visualizers_manager/migrations/0040_visualizer_config_data_model.py
new file mode 100644
index 0000000000..b8a487bceb
--- /dev/null
+++ b/api_app/visualizers_manager/migrations/0040_visualizer_config_data_model.py
@@ -0,0 +1,117 @@
+from django.db import migrations
+from django.db.models.fields.related_descriptors import (
+ ForwardManyToOneDescriptor,
+ ForwardOneToOneDescriptor,
+ ManyToManyDescriptor,
+ ReverseManyToOneDescriptor,
+ ReverseOneToOneDescriptor,
+)
+
+plugin = {
+ "python_module": {
+ "health_check_schedule": None,
+ "update_schedule": None,
+ "module": "data_model.DataModel",
+ "base_path": "api_app.visualizers_manager.visualizers",
+ },
+ "playbooks": ["FREE_TO_USE_ANALYZERS"],
+ "name": "Data_Model",
+ "description": "Visualizer for Data Models",
+ "disabled": False,
+ "soft_time_limit": 60,
+ "routing_key": "default",
+ "health_check_status": True,
+ "model": "visualizers_manager.VisualizerConfig",
+}
+
+params = []
+
+values = []
+
+
+def _get_real_obj(Model, field, value):
+ def _get_obj(Model, other_model, value):
+ if isinstance(value, dict):
+ real_vals = {}
+ for key, real_val in value.items():
+ real_vals[key] = _get_real_obj(other_model, key, real_val)
+ value = other_model.objects.get_or_create(**real_vals)[0]
+ # it is just the primary key serialized
+ else:
+ if isinstance(value, int):
+ if Model.__name__ == "PluginConfig":
+ value = other_model.objects.get(name=plugin["name"])
+ else:
+ value = other_model.objects.get(pk=value)
+ else:
+ value = other_model.objects.get(name=value)
+ return value
+
+ if (
+ type(getattr(Model, field))
+ in [
+ ForwardManyToOneDescriptor,
+ ReverseManyToOneDescriptor,
+ ReverseOneToOneDescriptor,
+ ForwardOneToOneDescriptor,
+ ]
+ and value
+ ):
+ other_model = getattr(Model, field).get_queryset().model
+ value = _get_obj(Model, other_model, value)
+ elif type(getattr(Model, field)) in [ManyToManyDescriptor] and value:
+ other_model = getattr(Model, field).rel.model
+ value = [_get_obj(Model, other_model, val) for val in value]
+ return value
+
+
+def _create_object(Model, data):
+ mtm, no_mtm = {}, {}
+ for field, value in data.items():
+ value = _get_real_obj(Model, field, value)
+ if type(getattr(Model, field)) is ManyToManyDescriptor:
+ mtm[field] = value
+ else:
+ no_mtm[field] = value
+ try:
+ o = Model.objects.get(**no_mtm)
+ except Model.DoesNotExist:
+ o = Model(**no_mtm)
+ o.full_clean()
+ o.save()
+ for field, value in mtm.items():
+ attribute = getattr(o, field)
+ if value is not None:
+ attribute.set(value)
+ return False
+ return True
+
+
+def migrate(apps, schema_editor):
+ Parameter = apps.get_model("api_app", "Parameter")
+ PluginConfig = apps.get_model("api_app", "PluginConfig")
+ python_path = plugin.pop("model")
+ Model = apps.get_model(*python_path.split("."))
+ if not Model.objects.filter(name=plugin["name"]).exists():
+ exists = _create_object(Model, plugin)
+ if not exists:
+ for param in params:
+ _create_object(Parameter, param)
+ for value in values:
+ _create_object(PluginConfig, value)
+
+
+def reverse_migrate(apps, schema_editor):
+ python_path = plugin.pop("model")
+ Model = apps.get_model(*python_path.split("."))
+ Model.objects.get(name=plugin["name"]).delete()
+
+
+class Migration(migrations.Migration):
+ atomic = False
+ dependencies = [
+ ("api_app", "0064_vt_sample_download"),
+ ("visualizers_manager", "0039_sample_download"),
+ ]
+
+ operations = [migrations.RunPython(migrate, reverse_migrate)]
diff --git a/api_app/visualizers_manager/visualizers/data_model.py b/api_app/visualizers_manager/visualizers/data_model.py
new file mode 100644
index 0000000000..ced5cc1ac5
--- /dev/null
+++ b/api_app/visualizers_manager/visualizers/data_model.py
@@ -0,0 +1,425 @@
+from logging import getLogger
+from typing import Dict, List
+
+from api_app.analyzers_manager.models import AnalyzerReport
+from api_app.data_model_manager.enums import DataModelEvaluations
+from api_app.data_model_manager.models import (
+ DomainDataModel,
+ FileDataModel,
+ IPDataModel,
+)
+from api_app.visualizers_manager.classes import Visualizer
+from api_app.visualizers_manager.decorators import (
+ visualizable_error_handler_with_params,
+)
+from api_app.visualizers_manager.enums import VisualizableTableColumnSize
+
+logger = getLogger(__name__)
+
+
+class DataModel(Visualizer):
+ @classmethod
+ def update(cls) -> bool:
+ pass
+
+ @visualizable_error_handler_with_params("get_eval_list")
+ def get_eval_list(self, evaluation, color, icon, data_models):
+ disable_element = not bool(data_models)
+ return self.VList(
+ name=self.Base(
+ value=evaluation,
+ color=color if not disable_element else Visualizer.Color.TRANSPARENT,
+ icon=icon,
+ disable=False,
+ ),
+ value=[
+ self.Base(
+ value=data_model.analyzers_report.all().first().config.name,
+ disable=False,
+ )
+ for data_model in data_models
+ ],
+ size=self.Size.S_2,
+ disable=disable_element,
+ start_open=True,
+ )
+
+ @visualizable_error_handler_with_params("get_base_data_list")
+ def get_base_data_list(self, name, values_list):
+ disable_element = not bool(values_list)
+ return self.VList(
+ name=self.Base(value=name, disable=False),
+ value=values_list,
+ disable=disable_element,
+ start_open=True,
+ )
+
+ @visualizable_error_handler_with_params("get_field")
+ def get_field(self, field, data_models):
+ for data_model in data_models:
+ value = getattr(data_model, field, None)
+ if value:
+ return Visualizer.Title(
+ title=Visualizer.Base(value=field.replace("_", " "), disable=False),
+ value=Visualizer.Base(
+ value=value,
+ disable=False,
+ ),
+ disable=False,
+ )
+
+ return Visualizer.Title(
+ title=Visualizer.Base(value=field.replace("_", " "), disable=True),
+ value=Visualizer.Base(
+ value="",
+ disable=True,
+ ),
+ disable=True,
+ )
+
+ @visualizable_error_handler_with_params("get_resolutions")
+ def get_resolutions(self, data_models):
+ resolutions = []
+ for data_model in data_models:
+ if data_model.resolutions:
+ resolutions.append(
+ self.VList(
+ name=self.Base(
+ value=data_model.analyzers_report.all().first().config.name,
+ disable=False,
+ ),
+ value=[
+ self.Base(
+ value=resolution,
+ disable=False,
+ )
+ for resolution in data_model.resolutions
+ ],
+ size=self.Size.S_2,
+ disable=False,
+ start_open=True,
+ )
+ )
+ return resolutions
+
+ @visualizable_error_handler_with_params("get_pdns")
+ def get_pdns(self, data_models):
+ columns = [
+ self.TableColumn(
+ name="rrname", max_width=VisualizableTableColumnSize.S_300
+ ),
+ self.TableColumn(name="rrtype", max_width=VisualizableTableColumnSize.S_50),
+ self.TableColumn(name="rdata", max_width=VisualizableTableColumnSize.S_300),
+ self.TableColumn(
+ name="time_first", max_width=VisualizableTableColumnSize.S_100
+ ),
+ self.TableColumn(
+ name="time_last", max_width=VisualizableTableColumnSize.S_100
+ ),
+ self.TableColumn(
+ name="analyzer", max_width=VisualizableTableColumnSize.S_200
+ ),
+ ]
+
+ data = []
+ for data_model in data_models:
+ ietf_reports = data_model.ietf_report.all()
+ for report in ietf_reports:
+ data.append(
+ {
+ "rrname": self.Base(
+ value=report.rrname,
+ color=self.Color.TRANSPARENT,
+ disable=False,
+ ),
+ "rrtype": self.Base(
+ value=report.rrtype,
+ color=self.Color.TRANSPARENT,
+ disable=False,
+ ),
+ "rdata": self.VList(
+ value=[
+ self.Base(
+ value=rdata,
+ color=self.Color.TRANSPARENT,
+ disable=False,
+ )
+ for rdata in report.rdata
+ ],
+ disable=False,
+ ),
+ "time_first": self.Base(
+ value=report.time_first.strftime("%Y-%m-%d %H:%M:%S"),
+ color=self.Color.TRANSPARENT,
+ disable=False,
+ ),
+ "time_last": self.Base(
+ value=report.time_last.strftime("%Y-%m-%d %H:%M:%S"),
+ color=self.Color.TRANSPARENT,
+ disable=False,
+ ),
+ "analyzer": self.Base(
+ value=data_model.analyzers_report.all().first().config.name,
+ color=self.Color.TRANSPARENT,
+ disable=False,
+ ),
+ }
+ )
+
+ return self.Table(
+ columns=columns,
+ data=data,
+ size=Visualizer.Size.S_ALL,
+ page_size=10,
+ sort_by_id="time_last",
+ sort_by_desc=True,
+ )
+
+ @visualizable_error_handler_with_params("get_signatures")
+ def get_signatures(self, data_models):
+ columns = [
+ self.TableColumn(
+ name="provider", max_width=VisualizableTableColumnSize.S_100
+ ),
+ self.TableColumn(name="url", max_width=VisualizableTableColumnSize.S_300),
+ self.TableColumn(name="score", max_width=VisualizableTableColumnSize.S_50),
+ self.TableColumn(
+ name="analyzer", max_width=VisualizableTableColumnSize.S_100
+ ),
+ ]
+
+ data = []
+ for data_model in data_models:
+ signatures = data_model.signatures.all()
+ for signature in signatures:
+ data.append(
+ {
+ "provider": self.Base(
+ value=signature.provider,
+ color=self.Color.TRANSPARENT,
+ disable=False,
+ ),
+ "url": self.Base(
+ value=(
+ signature.url if signature.url else "No url available"
+ ),
+ link=signature.url,
+ color=self.Color.TRANSPARENT,
+ disable=not signature.url,
+ ),
+ "score": self.Base(
+ value=signature.score,
+ color=self.Color.TRANSPARENT,
+ disable=False,
+ ),
+ "analyzer": self.Base(
+ value=data_model.analyzers_report.all().first().config.name,
+ color=self.Color.TRANSPARENT,
+ disable=False,
+ ),
+ }
+ )
+
+ return self.Table(
+ columns=columns,
+ data=data,
+ size=Visualizer.Size.S_ALL,
+ page_size=10,
+ sort_by_id="provider",
+ )
+
+ def get_domain_data_elements(self, page, data_models):
+ page.add_level(
+ self.Level(
+ position=3,
+ size=self.LevelSize.S_4,
+ horizontal_list=self.HList(value=self.get_resolutions(data_models)),
+ )
+ )
+
+ page.add_level(
+ self.Level(
+ position=4,
+ size=self.LevelSize.S_4,
+ horizontal_list=self.HList(value=[self.get_field("rank", data_models)]),
+ )
+ )
+
+ page.add_level(
+ self.Level(
+ position=5,
+ size=self.LevelSize.S_5,
+ horizontal_list=self.HList(value=[self.get_pdns(data_models)]),
+ )
+ )
+
+ def get_ip_data_elements(self, page, data_models):
+ page.add_level(
+ self.Level(
+ position=3,
+ size=self.LevelSize.S_4,
+ horizontal_list=self.HList(value=self.get_resolutions(data_models)),
+ )
+ )
+
+ page.add_level(
+ self.Level(
+ position=4,
+ size=self.LevelSize.S_4,
+ horizontal_list=self.HList(
+ value=[
+ self.get_field(field, data_models)
+ for field in [
+ "asn",
+ "asn_rank",
+ "org_name",
+ "country_code",
+ "registered_country_code",
+ "isp",
+ ]
+ ]
+ ),
+ )
+ )
+
+ page.add_level(
+ self.Level(
+ position=5,
+ size=self.LevelSize.S_5,
+ horizontal_list=self.HList(value=[self.get_pdns(data_models)]),
+ )
+ )
+
+ def get_file_data_elements(self, page, data_models):
+ page.add_level(
+ self.Level(
+ position=3,
+ size=self.LevelSize.S_5,
+ horizontal_list=self.HList(value=[self.get_signatures(data_models)]),
+ )
+ )
+
+ def run(self) -> List[Dict]:
+ trusted_data_models = []
+ clean_data_models = []
+ suspicious_data_models = []
+ malicious_data_models = []
+ noeval_data_models = []
+ data_models = self.data_models()
+
+ for data_model in data_models:
+ printable_analyzer_name = (
+ data_model.analyzers_report.all().first().config.name.replace("_", " ")
+ )
+ logger.debug(f"{printable_analyzer_name}, {data_model}")
+
+ evaluation = ""
+ if data_model.evaluation:
+ evaluation = data_model.evaluation
+
+ if evaluation == DataModelEvaluations.TRUSTED.value:
+ trusted_data_models.append(data_model)
+ elif evaluation == DataModelEvaluations.CLEAN.value:
+ clean_data_models.append(data_model)
+ elif evaluation == DataModelEvaluations.SUSPICIOUS.value:
+ suspicious_data_models.append(data_model)
+ elif evaluation == DataModelEvaluations.MALICIOUS.value:
+ malicious_data_models.append(data_model)
+ else:
+ noeval_data_models.append(data_model)
+
+ evals_vlists = []
+ for evaluation, color, icon, eval_data_models in [
+ (
+ "no evaluation",
+ Visualizer.Color.SECONDARY,
+ Visualizer.Icon.INFO,
+ noeval_data_models,
+ ),
+ (
+ DataModelEvaluations.CLEAN.value,
+ Visualizer.Color.SUCCESS,
+ Visualizer.Icon.LIKE,
+ clean_data_models,
+ ),
+ (
+ DataModelEvaluations.TRUSTED.value,
+ Visualizer.Color.SUCCESS,
+ Visualizer.Icon.LIKE,
+ trusted_data_models,
+ ),
+ (
+ DataModelEvaluations.SUSPICIOUS.value,
+ Visualizer.Color.WARNING,
+ Visualizer.Icon.WARNING,
+ suspicious_data_models,
+ ),
+ (
+ DataModelEvaluations.MALICIOUS.value,
+ Visualizer.Color.DANGER,
+ Visualizer.Icon.MALWARE,
+ malicious_data_models,
+ ),
+ ]:
+ evals_vlists.append(
+ self.get_eval_list(evaluation, color, icon, eval_data_models)
+ )
+
+ related_threats = []
+ external_references = []
+ malware_families = []
+ tags = []
+
+ for data_model in data_models:
+ related_threats.extend(data_model.related_threats)
+ external_references.extend(data_model.external_references)
+ if data_model.malware_family:
+ malware_families.append(data_model.malware_family)
+ if data_model.tags:
+ tags.extend(data_model.tags)
+
+ related_threats = list(set(related_threats))
+ external_references = list(set(external_references))
+ malware_families = list(set(malware_families))
+ tags = list(set(tags))
+
+ base_data_vlists = []
+ for name, values_list in [
+ ("Tags", tags),
+ ("Related threats", related_threats),
+ ("Malware families", malware_families),
+ ("External references", external_references),
+ ]:
+ base_data_vlists.append(self.get_base_data_list(name, values_list))
+
+ page = self.Page(name="DataModel")
+ page.add_level(
+ self.Level(
+ position=1,
+ size=self.LevelSize.S_4,
+ horizontal_list=self.HList(value=evals_vlists),
+ )
+ )
+
+ page.add_level(
+ self.Level(
+ position=2,
+ size=self.LevelSize.S_4,
+ horizontal_list=self.HList(value=base_data_vlists),
+ )
+ )
+
+ data_model_class = AnalyzerReport.get_data_model_class(self._job)
+ if data_model_class == DomainDataModel:
+ self.get_domain_data_elements(page, data_models)
+ elif data_model_class == IPDataModel:
+ self.get_ip_data_elements(page, data_models)
+ elif data_model_class == FileDataModel:
+ self.get_file_data_elements(page, data_models)
+
+ return [page.to_dict()]
+
+ @classmethod
+ def _monkeypatch(cls):
+ patches = []
+ return super()._monkeypatch(patches=patches)
diff --git a/docker/.env b/docker/.env
index 5117a613dc..2c83d70c03 100644
--- a/docker/.env
+++ b/docker/.env
@@ -1,6 +1,6 @@
### DO NOT CHANGE THIS VALUE !!
### It should be updated only when you pull latest changes off from the 'master' branch of IntelOwl.
# this variable must start with "REACT_APP_" to be used in the frontend too
-REACT_APP_INTELOWL_VERSION=v6.2.1
+REACT_APP_INTELOWL_VERSION=v6.3.0
# if you want to use a nfs volume for shared files
# NFS_ADDRESS=
diff --git a/docker/Dockerfile b/docker/Dockerfile
index e1f7191024..de5c5c5a5f 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -15,14 +15,15 @@ RUN npm install npm@latest --location=global \
# Stage 2: Backend
FROM python:3.11.7 AS backend-build
-ENV PYTHONUNBUFFERED 1
-ENV DJANGO_SETTINGS_MODULE intel_owl.settings
-ENV PYTHONPATH /opt/deploy/intel_owl
-ENV LOG_PATH /var/log/intel_owl
+ENV PYTHONUNBUFFERED=1
+ENV DJANGO_SETTINGS_MODULE=intel_owl.settings
+ENV PYTHONPATH=/opt/deploy/intel_owl
+ENV LOG_PATH=/var/log/intel_owl
ARG REPO_DOWNLOADER_ENABLED=true
ARG WATCHMAN=false
-ENV watch_logs_cmd "watch -n1 tail -n10 /var/log/intel_owl/django/api_app.log"
-ARG PYCTI_VERSION=6.1.0
+ENV watch_logs_cmd="watch -n1 tail -n10 /var/log/intel_owl/django/api_app.log"
+# This is required to allow compatibility with different OpenCTI instances
+ARG PYCTI_VERSION=6.5.1
RUN mkdir -p ${LOG_PATH} \
${LOG_PATH}/django \
@@ -34,18 +35,15 @@ RUN mkdir -p ${LOG_PATH} \
# python3-psycopg2 is required to use PostgresSQL with Django
# apache2-utils is required to execute htpasswd
# tshark is required for Hfinger file analyzer
+# libemail-outlook-message-perl and libemail-address-perl are required for msgconvert
RUN apt-get update \
&& apt-get install -y --no-install-recommends apt-utils libsasl2-dev libssl-dev netcat-traditional \
- vim libldap2-dev libfuzzy-dev net-tools python3-psycopg2 git apache2-utils tshark \
- && apt-get clean \
+ vim libldap2-dev libfuzzy-dev net-tools python3-psycopg2 git apache2-utils tshark \
+ libemail-outlook-message-perl libemail-address-perl \
+ && apt-get clean && apt-get autoclean && apt-get autoremove -y \
&& rm -rf /var/lib/apt/lists/* \
&& pip3 install --no-cache-dir --upgrade pip
-# perl not interactive
-ENV PERL_MM_USE_DEFAULT 1
-# msgconvert
-RUN cpan -T Email::Outlook::Message
-
COPY requirements/project-requirements.txt $PYTHONPATH/project-requirements.txt
COPY requirements/certego-requirements.txt $PYTHONPATH/certego-requirements.txt
WORKDIR $PYTHONPATH
@@ -77,5 +75,5 @@ COPY --from=frontend-build /build /var/www/reactapp
# HOME_DIR = f"{Path.home()}/.quark-engine/"
# Path(HOME_DIR).mkdir(parents=True, exist_ok=True)
# so we have to set the home env variable to allow to create its directory
-ENV HOME ${PYTHONPATH}
+ENV HOME="${PYTHONPATH}"
diff --git a/docker/Dockerfile_nginx b/docker/Dockerfile_nginx
index 134f3da1fe..bb6b812bc8 100644
--- a/docker/Dockerfile_nginx
+++ b/docker/Dockerfile_nginx
@@ -1,6 +1,6 @@
-FROM library/nginx:1.27.0-alpine
+FROM library/nginx:1.27.3-alpine
-ENV NGINX_LOG_DIR /var/log/nginx
+ENV NGINX_LOG_DIR=/var/log/nginx
# this is to avoid having these logs redirected to stdout/stderr
RUN rm $NGINX_LOG_DIR/access.log $NGINX_LOG_DIR/error.log && touch $NGINX_LOG_DIR/access.log $NGINX_LOG_DIR/error.log
VOLUME /var/log/nginx
diff --git a/docker/hooks/build b/docker/hooks/build
new file mode 100644
index 0000000000..fa3372f7e4
--- /dev/null
+++ b/docker/hooks/build
@@ -0,0 +1,19 @@
+#!/bin/bash
+
+echo "display path"
+echo "$(pwd)"
+echo "display dockerfile path"
+echo "$DOCKERFILE_PATH"
+cd ..
+echo "current branch"
+echo "$SOURCE_BRANCH"
+
+version_regex='^v[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$'
+if [[ "$SOURCE_BRANCH" == "master" || "$SOURCE_BRANCH" =~ $version_regex ]]; then
+ echo "The branch is master, proceeding with multi-arch build"
+ docker buildx create --name multiarch --use
+ docker buildx build -f "$DOCKERFILE_PATH" -t "$IMAGE_NAME" --platform linux/arm64,linux/amd64 --push .
+else
+ echo "The branch is not master, proceeding with classic build"
+ docker buildx build -f "$DOCKERFILE_PATH" -t "$IMAGE_NAME" --push .
+fi
\ No newline at end of file
diff --git a/docker/hooks/post_push b/docker/hooks/post_push
new file mode 100644
index 0000000000..ebe7a8a930
--- /dev/null
+++ b/docker/hooks/post_push
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+if [[ "$SOURCE_BRANCH" == "master" ]]; then
+ curl -d "text=A new production image has been pushed to Docker Hub" -d "channel=$SLACK_CHANNEL" -H "Authorization: Bearer $SLACK_TOKEN" -X POST https://slack.com/api/chat.postMessage
+fi
\ No newline at end of file
diff --git a/frontend/package-lock.json b/frontend/package-lock.json
index f33df493e7..7e80aaef7a 100644
--- a/frontend/package-lock.json
+++ b/frontend/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "intelowl",
- "version": "6.1.0",
+ "version": "6.2.0",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "intelowl",
- "version": "6.1.0",
+ "version": "6.2.0",
"dependencies": {
"@certego/certego-ui": "^0.1.13",
"@dagrejs/dagre": "^1.1.4",
diff --git a/frontend/package.json b/frontend/package.json
index 95652e44d0..fed81859fb 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -1,6 +1,6 @@
{
"name": "intelowl",
- "version": "6.2.1",
+ "version": "6.3.0",
"private": true,
"proxy": "http://localhost:80/",
"dependencies": {
diff --git a/frontend/src/components/GuideWrapper.jsx b/frontend/src/components/GuideWrapper.jsx
index 1006237a78..8a692e8640 100644
--- a/frontend/src/components/GuideWrapper.jsx
+++ b/frontend/src/components/GuideWrapper.jsx
@@ -3,6 +3,7 @@ import Joyride from "react-joyride";
import { Outlet, useNavigate } from "react-router-dom";
import { useMount } from "react-use";
import { useGuideContext } from "../contexts/GuideContext";
+import { INTELOWL_DOCS_URL } from "../constants/environment";
export default function GuideWrapper() {
const { guideState, setGuideState } = useGuideContext();
@@ -17,8 +18,7 @@ export default function GuideWrapper() {
Welcome to IntelOwls Guide for First Time Visitors! For further
questions you could either check out our{" "}
- docs or reach
- us out on{" "}
+ docs or reach us out on{" "}
the official IntelOwl slack channel
diff --git a/frontend/src/components/common/flows/getLayoutedElements.js b/frontend/src/components/common/flows/getLayoutedElements.js
new file mode 100644
index 0000000000..b862a2cfe5
--- /dev/null
+++ b/frontend/src/components/common/flows/getLayoutedElements.js
@@ -0,0 +1,37 @@
+import dagre from "@dagrejs/dagre";
+
+/* eslint-disable id-length */
+export function getLayoutedElements(
+ nodes,
+ edges,
+ nodeWidth,
+ nodeHeight,
+ deltaX,
+ deltaY,
+) {
+ // needed for graph layout
+ const dagreGraph = new dagre.graphlib.Graph();
+ dagreGraph.setDefaultEdgeLabel(() => ({}));
+
+ dagreGraph.setGraph({ rankdir: "LR" });
+
+ nodes.forEach((node) => {
+ dagreGraph.setNode(node.id, { width: nodeWidth, height: nodeHeight });
+ });
+
+ edges.forEach((edge) => {
+ dagreGraph.setEdge(edge.source, edge.target);
+ });
+
+ dagre.layout(dagreGraph);
+
+ nodes.forEach((node) => {
+ const nodeWithPosition = dagreGraph.node(node.id);
+ // eslint-disable-next-line no-param-reassign
+ node.position = {
+ x: nodeWithPosition.x - nodeWidth / 2 + deltaX,
+ y: nodeWithPosition.y - nodeHeight / 2 + deltaY,
+ };
+ });
+ return { nodes, edges };
+}
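
For context, a sketch of how a React Flow screen could feed this helper; the two-node graph and the 150x70 node size are illustrative, not values taken from the IntelOwl components:

    // hypothetical input graph; dagre overwrites the initial positions
    const nodes = [
      { id: "1", data: { label: "job" }, position: { x: 0, y: 0 } },
      { id: "2", data: { label: "pivot" }, position: { x: 0, y: 0 } },
    ];
    const edges = [{ id: "1-2", source: "1", target: "2" }];
    // 150x70 nodes laid out left-to-right, no extra offset
    const { nodes: layoutedNodes } = getLayoutedElements(nodes, edges, 150, 70, 0, 0);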
diff --git a/frontend/src/components/common/form/TLPSelectInput.jsx b/frontend/src/components/common/form/TLPSelectInput.jsx
index abea221f18..1784975836 100644
--- a/frontend/src/components/common/form/TLPSelectInput.jsx
+++ b/frontend/src/components/common/form/TLPSelectInput.jsx
@@ -14,6 +14,7 @@ import { TLPDescriptions } from "../../../constants/miscConst";
import { TlpChoices } from "../../../constants/advancedSettingsConst";
import { TLPTag } from "../TLPTag";
import { TLPColors } from "../../../constants/colorConst";
+import { INTELOWL_DOCS_URL } from "../../../constants/environment";
export function TLPSelectInputLabel(props) {
const { size } = props;
@@ -36,7 +37,7 @@ export function TLPSelectInputLabel(props) {
For more info check the{" "}
official doc.
diff --git a/frontend/src/components/dashboard/charts.jsx b/frontend/src/components/dashboard/charts.jsx
index f16f034571..95a8857a29 100644
--- a/frontend/src/components/dashboard/charts.jsx
+++ b/frontend/src/components/dashboard/charts.jsx
@@ -1,5 +1,6 @@
import React from "react";
import { Bar } from "recharts";
+import PropTypes from "prop-types";
import { getRandomColorsArray, AnyChartWidget } from "@certego/certego-ui";
@@ -215,3 +216,31 @@ export const JobTopTLPBarChart = React.memo((props) => {
diff --git a/frontend/src/components/plugins/flows/CustomPivotNode.jsx b/frontend/src/components/plugins/flows/CustomPivotNode.jsx
new file mode 100644
index 0000000000..0c5ace8fb1
--- /dev/null
+++ b/frontend/src/components/plugins/flows/CustomPivotNode.jsx
@@ -0,0 +1,89 @@
+import React from "react";
+import PropTypes from "prop-types";
+import { Handle, Position, NodeToolbar } from "reactflow";
+import "reactflow/dist/style.css";
+import { Badge } from "reactstrap";
+import { IoMdWarning } from "react-icons/io";
+
+function CustomPivotNode({ data }) {
+ return (
+ <>
+