94 changes: 53 additions & 41 deletions docs/Usage.md
@@ -646,58 +646,70 @@ python nettacker.py --start-api --api-access-log --api-port 8080 --api-debug-mod
![](https://github.com/aman566/DiceGameJS/blob/master/Screencast-from-Tuesday-09-June-2020-02-32-32-IST-_online-video-cutter.com_.gif)

# Database
OWASP Nettacker currently supports two databases:

OWASP Nettacker currently supports three databases:

- SQLite
- MySQL
- PostgreSQL

The default database is SQLite. You can, however, configure the db to your liking.

## SQLite configuration
The SQLite database can be configured in the `core/config.py` file under the `_database_config()` function. Here is a sample configuration:
```
return {
    "DB": "sqlite",
    "DATABASE": _paths()["home_path"] + "/nettacker.db",  # This is the location of your db
    "USERNAME": "",
    "PASSWORD": "",
    "HOST": "",
    "PORT": ""
}
```

The SQLite configuration, including the settings used by the optional **APSW** (Another Python SQLite Wrapper) backend, can be found inside the `nettacker/config.py` file under the `DbConfig` class:


engine = "sqlite"
name = str(CWD / ".nettacker/data/nettacker.db")
host = ""
port = ""
username = ""
password = ""
ssl_mode = "disable"
journal_mode = "WAL"
synchronous_mode = "NORMAL"

These are the default and recommended settings; feel free to change them to suit your needs. To use the SQLite database, ensure that `engine` is set to `sqlite` and `name` points to the path of your database file. The defaults for `journal_mode` and `synchronous_mode` are chosen to perform well under multithreaded I/O.
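
If you are curious what those two settings do under the hood, they map to standard SQLite PRAGMAs. The sketch below shows the equivalent calls using Python's built-in `sqlite3` module; it is illustrative only, not Nettacker's actual database layer, and it assumes the default database path already exists:

```python
import sqlite3

# Illustrative only: apply the WAL/NORMAL settings from DbConfig to a SQLite connection.
conn = sqlite3.connect(".nettacker/data/nettacker.db")
conn.execute("PRAGMA journal_mode=WAL")    # write-ahead log: readers don't block writers
conn.execute("PRAGMA synchronous=NORMAL")  # fewer fsyncs; still safe when combined with WAL
conn.close()
```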

> Note: You can switch to a lightweight SQLite wrapper called APSW by setting the `use_apsw_for_sqlite` option in the config to `True` for a performance boost.
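
Before enabling that flag, you may want to confirm that the optional `apsw` package is actually installed. A minimal check (the `apsw` import belongs to the APSW package itself, not to Nettacker):

```python
# Minimal sketch: verify APSW is importable before setting use_apsw_for_sqlite = True.
try:
    import apsw  # "Another Python SQLite Wrapper"
    print("APSW is installed; enabling use_apsw_for_sqlite is an option")
except ImportError:
    print("APSW is not installed; keep use_apsw_for_sqlite = False")
```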

## MySQL configuration

The MySQL database can be configured in the `core/config.py` file under the `_database_config()` function. Here is a sample configuration:
```
return {
    "DB": "mysql",
    "DATABASE": "nettacker",  # This is the name of your db
    "USERNAME": "username",
    "PASSWORD": "password",
    "HOST": "localhost or some other host",
    "PORT": "3306 or some other custom port"
}
```
After this configuration:
1. Open the MySQL configuration file (`/etc/mysql/my.cnf` on Linux) as a sudo user
2. Add this to the end of the file:
```
[mysqld]
sql_mode = "STRICT_TRANS_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION"
```
3. Restart MySQL

The MySQL database can be configured in the `nettacker/config.py` file under the `DbConfig` class. Here is a sample configuration:

```
engine = "mysql"
name = "nettacker"
host = "localhost"
port = 3306
username = "root"
password = "some-password"
ssl_mode = "disable"
journal_mode = "WAL"
synchronous_mode = "NORMAL"
```

Only the relevant fields will be considered, and you don't need to update, change, or remove the irrelevant ones (`ssl_mode`, `journal_mode`, and `synchronous_mode` aren't relevant in this case).
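
For reference, fields like these conventionally combine into a SQLAlchemy-style connection URL. The sketch below assumes a standard SQLAlchemy engine with the `pymysql` driver; Nettacker's own URL construction may differ:

```python
from sqlalchemy import create_engine

# Hypothetical illustration of how engine/name/host/port/username/password
# combine into a MySQL connection URL.
url = "mysql+pymysql://root:some-password@localhost:3306/nettacker"
engine = create_engine(url)  # requires the pymysql driver to be installed
```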

## Postgres configuration

The Postgres database can be configured in the `core/config.py` file under the `_database_config()` function. Here is a sample configuration:
```
return {
    "DB": "postgres",
    "DATABASE": "nettacker",  # Name of db
    "USERNAME": "username",
    "PASSWORD": "password",
    "HOST": "localhost or some other host",
    "PORT": "5432 or some other custom port"
}
```
After this configuration, comment out the following line in `database/db.py`: `connect_args={'check_same_thread': False}`.
The Postgres database can be configured in the `nettacker/config.py` file under the `DbConfig` class. Here is a sample configuration:

```
engine = "postgres"
name = "nettacker"
host = "localhost"
port = 5432
username = "root"
password = "some-password"
ssl_mode = "disable"
journal_mode = "WAL"
synchronous_mode = "NORMAL"
```

In this case, the irrelevant fields are `journal_mode` and `synchronous_mode`; you don't have to update, change, or remove them.

**Note**: If you want an encrypted connection, set `ssl_mode` to `require`.
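
In connection-URL terms, `ssl_mode` corresponds to the `sslmode` query parameter of a standard Postgres DSN. The sketch below assumes a SQLAlchemy engine with the `psycopg2` driver and may differ from Nettacker's internal wiring:

```python
from sqlalchemy import create_engine

# Hypothetical illustration: require TLS for the Postgres connection.
url = "postgresql+psycopg2://root:some-password@localhost:5432/nettacker?sslmode=require"
engine = create_engine(url)  # requires psycopg2 (or psycopg2-binary) to be installed
```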

Let me know if you have any more questions.
13 changes: 12 additions & 1 deletion nettacker/config.py
@@ -82,7 +82,11 @@ class DbConfig(ConfigBase):
For sqlite database:
fill the name of the DB as sqlite,
DATABASE as the name of the db user wants
other details can be left empty
Set the journal_mode (default="WAL") and
synchronous_mode (default="NORMAL"). Rest
of the fields can be left empty
This is the default database:
str(CWD / ".nettacker/data/nettacker.db")
For mysql users:
fill the ENGINE name of the DB as mysql
NAME as the name of the database you want to create
@@ -104,6 +108,8 @@ class DbConfig(ConfigBase):
username = ""
password = ""
ssl_mode = "disable"
journal_mode = "WAL"
synchronous_mode = "NORMAL"


class PathConfig:
@@ -142,6 +148,9 @@ class DefaultSettings(ConfigBase):
parallel_module_scan = 1
passwords = None
passwords_list = None
use_apsw_for_sqlite = (
    False  # Setting to toggle between APSW and SQLAlchemy for sqlite databases
)
ping_before_scan = False
ports = None
profiles = None
@@ -151,6 +160,8 @@
random_chars=generate_random_token(10),
)
retries = 1
max_retries = 3
retry_delay = 0.1
scan_ip_range = False
scan_subdomains = False
selected_modules = None
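
The `max_retries` and `retry_delay` defaults added above suggest a simple wait-and-retry pattern. The sketch below is a generic illustration of how such settings are typically consumed, not Nettacker's actual retry code:

```python
import time


def with_retries(operation, max_retries=3, retry_delay=0.1):
    """Run `operation` up to max_retries times, sleeping retry_delay seconds between attempts."""
    for attempt in range(1, max_retries + 1):
        try:
            return operation()
        except Exception:
            if attempt == max_retries:
                raise
            time.sleep(retry_delay)
```
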
4 changes: 1 addition & 3 deletions nettacker/core/app.py
@@ -158,9 +158,7 @@ def expand_targets(self, scan_id):

for target in copy.deepcopy(self.arguments.targets):
    for row in find_events(target, "subdomain_scan", scan_id):
        for sub_domain in json.loads(row.json_event)["response"]["conditions_results"][
            "content"
        ]:
        for sub_domain in json.loads(row)["response"]["conditions_results"]["content"]:
            if sub_domain not in self.arguments.targets:
                self.arguments.targets.append(sub_domain)
# icmp_scan
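
With this change, `expand_targets` treats each row returned by `find_events` as a JSON string rather than an ORM object exposing a `json_event` attribute. A toy illustration of the parsing step (the payload below is hypothetical, not taken from the codebase):

```python
import json

# Hypothetical subdomain_scan event payload stored as a JSON string.
row = '{"response": {"conditions_results": {"content": ["api.example.com", "www.example.com"]}}}'
for sub_domain in json.loads(row)["response"]["conditions_results"]["content"]:
    print(sub_domain)
```
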
6 changes: 3 additions & 3 deletions nettacker/core/graph.py
@@ -86,7 +86,7 @@ def build_text_table(events):
table_headers = ["date", "target", "module_name", "port", "logs"]
_table.add_rows([table_headers])
for event in events:
    log = merge_logs_to_list(json.loads(event["json_event"]), [])
    log = merge_logs_to_list(event, [])
    _table.add_rows(
        [
            table_headers,
@@ -252,15 +252,15 @@ def create_report(options, scan_id):
)
index = 1
for event in all_scan_logs:
    log_list = merge_logs_to_list(json.loads(event["json_event"]), [])
    log_list = merge_logs_to_list(event, [])
    html_table_content += log_data.table_items.format(
        event["date"],
        event["target"],
        event["module_name"],
        event["port"],
        "<br>".join(log_list) if log_list else "Detected",  # event["event"], #log
        index,
        html.escape(event["json_event"]),
        html.escape(json.dumps(event)),
    )
    index += 1
html_table_content += (
2 changes: 1 addition & 1 deletion nettacker/core/module.py
@@ -78,7 +78,7 @@ def load(self):
if not self.skip_service_discovery and self.module_name not in self.ignored_core_modules:
    services = {}
    for service in find_events(self.target, "port_scan", self.scan_id):
        service_event = json.loads(service.json_event)
        service_event = json.loads(service)
        port = service_event["port"]
        protocols = service_event["response"]["conditions_results"].keys()
        for protocol in protocols:
5 changes: 5 additions & 0 deletions nettacker/core/utils/common.py
@@ -3,6 +3,7 @@
import datetime
import hashlib
import importlib
import json
import math
import multiprocessing
import random
@@ -32,6 +33,10 @@ def replace_dependent_response(log, response_dependent):

def merge_logs_to_list(result, log_list=[]):
    if isinstance(result, dict):
        # Doesn't hurt normal operations
        if "json_event" in list(result.keys()):
            if not isinstance(result["json_event"], dict):
                result["json_event"] = json.loads(result["json_event"])
        for i in result:
            if "log" == i:
                log_list.append(result["log"])
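
The added check lets `merge_logs_to_list` accept rows whose `json_event` field is still a raw JSON string and decode it in place before walking the structure. A small illustrative call (the row below is hypothetical, not taken from the codebase):

```python
from nettacker.core.utils.common import merge_logs_to_list

# Hypothetical row dict whose json_event is still a raw JSON string.
row = {
    "log": "port 80 open",
    "json_event": '{"response": {"conditions_results": {"content": []}}}',
}
logs = merge_logs_to_list(row, [])  # json_event is decoded in place before the recursive walk
print(logs)
```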