diff --git a/.github/workflows/install-slips-dependencies.yml b/.github/workflows/install-slips-dependencies.yml
index e9a305897..841dd7f20 100644
--- a/.github/workflows/install-slips-dependencies.yml
+++ b/.github/workflows/install-slips-dependencies.yml
@@ -35,7 +35,9 @@ jobs:
         run: sysctl vm.overcommit_memory=1

       - name: Install APT dependencies
-        run: sudo apt-get update --fix-missing && sudo apt-get -y --no-install-recommends install $(cat install/apt_dependencies.txt)
+        run: |
+          sudo apt-get update --fix-missing && sudo apt-get -y --no-install-recommends install $(cat install/apt_dependencies.txt)
+          sudo apt-get -y install font-manager

       - name: Save APT Cache
         uses: actions/cache@v4
diff --git a/config/slips.yaml b/config/slips.yaml
index a130f7dbb..8219d2bc1 100644
--- a/config/slips.yaml
+++ b/config/slips.yaml
@@ -239,7 +239,7 @@ threatintelligence:
   # 2 weeks = 604800 seconds
   mac_db_update : 1209600
-  mac_db : https://maclookup.app/downloads/json-database/get-db?t=22-08-19&h=d1d39c52de447a7e7194331f379e1e99f94f35f1
+  mac_db : https://maclookup.app/downloads/json-database/get-db?t=24-11-28&h=26271dbc3529f006a4be021ec4cf99fab16e39cd

   # the file that contains all our TI feeds URLs and their threat level
   ti_files : config/TI_feeds.csv
diff --git a/modules/flowalerts/set_evidence.py b/modules/flowalerts/set_evidence.py
index 6735abac5..188f4d5bd 100644
--- a/modules/flowalerts/set_evidence.py
+++ b/modules/flowalerts/set_evidence.py
@@ -1351,8 +1351,6 @@ def malicious_ssl(self, twid, flow, ssl_info_from_db: str) -> None:
             timewindow=TimeWindow(number=int(twid.replace("timewindow", ""))),
             uid=[flow.uid],
             timestamp=flow.starttime,
-            src_port=flow.sport,
-            dst_port=flow.dport,
         )

         self.db.set_evidence(evidence)
@@ -1373,8 +1371,6 @@ def malicious_ssl(self, twid, flow, ssl_info_from_db: str) -> None:
             timewindow=TimeWindow(number=int(twid.replace("timewindow", ""))),
             uid=[flow.uid],
             timestamp=flow.starttime,
-            src_port=flow.sport,
-            dst_port=flow.dport,
         )

         self.db.set_evidence(evidence)
diff --git a/modules/flowalerts/ssl.py b/modules/flowalerts/ssl.py
index 2e10937fe..b8c50378f 100644
--- a/modules/flowalerts/ssl.py
+++ b/modules/flowalerts/ssl.py
@@ -82,7 +82,7 @@ def detect_malicious_ja3(self, twid, flow):
             self.set_evidence.malicious_ja3(twid, flow, malicious_ja3_dict)

         if flow.ja3s in malicious_ja3_dict:
-            self.set_evidence.malicious_ja3s(twid, flow)
+            self.set_evidence.malicious_ja3s(twid, flow, malicious_ja3_dict)

     def detect_incompatible_cn(self, profileid, twid, flow):
         """
diff --git a/modules/http_analyzer/http_analyzer.py b/modules/http_analyzer/http_analyzer.py
index 4c8218135..86f6725dc 100644
--- a/modules/http_analyzer/http_analyzer.py
+++ b/modules/http_analyzer/http_analyzer.py
@@ -196,7 +196,9 @@ def check_multiple_empty_connections(self, twid: str, flow):
             self.connections_counter[host] = ([], 0)
             return True

-    def set_evidence_incompatible_user_agent(self, twid, flow, user_agent):
+    def set_evidence_incompatible_user_agent(
+        self, twid, flow, user_agent, vendor
+    ):
         os_type: str = user_agent.get("os_type", "").lower()
         os_name: str = user_agent.get("os_name", "").lower()
@@ -207,7 +209,7 @@ def set_evidence_incompatible_user_agent(self, twid, flow, user_agent):
             f"that belongs to OS: {os_name} "
             f"type: {os_type} browser: {browser}. "
             f"while connecting to {flow.host}{flow.uri}. "
-            f"IP has MAC vendor: {flow.vendor.capitalize()}"
+            f"IP has MAC vendor: {vendor.capitalize()}"
         )

         evidence: Evidence = Evidence(
@@ -298,7 +300,9 @@ def check_incompatible_user_agent(self, profileid, twid, flow):
         browser = user_agent.get("browser", "").lower()
         # user_agent = user_agent.get('user_agent', '')
         if "safari" in browser and "apple" not in vendor:
-            self.set_evidence_incompatible_user_agent(twid, flow, user_agent)
+            self.set_evidence_incompatible_user_agent(
+                twid, flow, user_agent, vendor
+            )
             return True

         # make sure all of them are lowercase
@@ -340,7 +344,7 @@ def check_incompatible_user_agent(self, profileid, twid, flow):
             # [('microsoft', 'windows', 'NT'), ('android'), ('linux')]
             # is found in the UA that belongs to an apple device
             self.set_evidence_incompatible_user_agent(
-                twid, flow, user_agent
+                twid, flow, user_agent, vendor
             )
             return True
diff --git a/modules/ip_info/asn_info.py b/modules/ip_info/asn_info.py
index f30f06601..06a92df0d 100644
--- a/modules/ip_info/asn_info.py
+++ b/modules/ip_info/asn_info.py
@@ -11,6 +11,9 @@ class ASN:
     def __init__(self, db=None):
         self.db = db

+        # update asn every 1 month
+        self.update_period = 2592000
+
         # Open the maxminddb ASN offline db
         try:
             self.asn_db = maxminddb.open_database(
@@ -51,7 +54,7 @@ def get_cached_asn(self, ip):
                 asn_info["asn"].update({"number": range_info["number"]})
         return asn_info

-    def update_asn(self, cached_data, update_period) -> bool:
+    def should_update_asn(self, cached_data) -> bool:
         """
         Returns True if
         - no asn data is found in the db OR ip has no cached info
@@ -61,10 +64,10 @@
         try:
             return (
                 time.time() - cached_data["asn"]["timestamp"]
-            ) > update_period
+            ) > self.update_period
         except (KeyError, TypeError):
-            # no there's no cached asn info,or no timestamp, or cached_data is None
-            # we should update
+            # there's no cached asn info, or no timestamp, or
+            # cached_data is None. we should update
             return True

     def get_asn_info_from_geolite(self, ip) -> dict:
diff --git a/modules/ip_info/ip_info.py b/modules/ip_info/ip_info.py
index f90475b25..ddf628f5e 100644
--- a/modules/ip_info/ip_info.py
+++ b/modules/ip_info/ip_info.py
@@ -20,7 +20,7 @@
 from slips_files.common.flow_classifier import FlowClassifier
 from slips_files.core.helpers.whitelist.whitelist import Whitelist
 from .asn_info import ASN
-from slips_files.common.abstracts.module import IModule
+from slips_files.common.abstracts.module import AsyncModule
 from slips_files.common.slips_utils import utils
 from slips_files.core.structures.evidence import (
     Evidence,
@@ -35,7 +35,7 @@
 )


-class IPInfo(IModule):
+class IPInfo(AsyncModule):
     # Name: short name of the module. Do not use spaces
     name = "IP Info"
     description = "Get different info about an IP/MAC address"
@@ -47,8 +47,6 @@ def init(self):
         self.asn = ASN(self.db)
         self.JARM = JARM()
         self.classifier = FlowClassifier()
-        # Set the output queue of our database instance
-        # To which channels do you wnat to subscribe? When a message arrives on the channel the module will wakeup
         self.c1 = self.db.subscribe("new_ip")
         self.c2 = self.db.subscribe("new_MAC")
         self.c3 = self.db.subscribe("new_dns")
@@ -59,11 +57,10 @@ def init(self):
             "new_dns": self.c3,
             "check_jarm_hash": self.c4,
         }
-        # update asn every 1 month
-        self.update_period = 2592000
         self.is_gw_mac_set = False
         self.whitelist = Whitelist(self.logger, self.db)
         self.is_running_non_stop: bool = self.db.is_running_non_stop()
+        self.valid_tlds = whois.validTlds()

     async def open_dbs(self):
         """Function to open the different offline databases used in this
@@ -76,7 +73,8 @@ async def open_dbs(self):
         except Exception:
             self.print(
                 "Error opening the geolite2 db in databases/GeoLite2-ASN.mmdb. "
-                "Please download it from https://dev.maxmind.com/geoip/docs/databases/asn?lang=en "
+                "Please download it from "
+                "https://dev.maxmind.com/geoip/docs/databases/asn?lang=en "
                 "Please note it must be the MaxMind DB version."
             )

@@ -88,13 +86,13 @@ async def open_dbs(self):
         except Exception:
             self.print(
                 "Error opening the geolite2 db in databases/GeoLite2-Country.mmdb. "
-                "Please download it from https://dev.maxmind.com/geoip/geolite2-free-geolocation-data?lang=en. "
+                "Please download it from "
+                "https://dev.maxmind.com/geoip/geolite2-free-geolocation-data?lang=en. "
                 "Please note it must be the MaxMind DB version."
             )
+        self.reading_mac_db_task = asyncio.create_task(self.read_mac_db())

-        asyncio.create_task(self.read_macdb())
-
-    async def read_macdb(self):
+    async def read_mac_db(self):
         while True:
             try:
                 self.mac_db = open("databases/macaddress-db.json", "r")
@@ -114,6 +112,7 @@ def get_geocountry(self, ip) -> dict:
         """
         if not hasattr(self, "country_db"):
             return False
+
         if utils.is_private_ip(ipaddress.ip_address(ip)):
             # Try to find if it is a local/private IP
             data = {"geocountry": "Private"}
@@ -138,7 +137,7 @@ def get_ip_family(self, ip):
         """
         return socket.AF_INET6 if ":" in ip else socket.AF_INET

-    def get_rdns(self, ip):
+    def get_rdns(self, ip: str) -> dict:
         """
         get reverse DNS of an ip
         returns RDNS of the given ip or False if not found
@@ -150,11 +149,12 @@ def get_rdns(self, ip):
             reverse_dns: str = socket.gethostbyaddr(ip)[0]
             # if there's no reverse dns record for this ip, reverse_dns will be an ip.
             try:
-                # reverse_dns is an ip. there's no reverse dns. don't store
+                # check if the reverse_dns value is a valid IP address
                 socket.inet_pton(self.get_ip_family(reverse_dns), reverse_dns)
+                # reverse_dns is an ip. there's no reverse dns. don't store
                 return False
             except socket.error:
-                # all good, store it
+                # reverse_dns is a valid hostname, store it
                 data["reverse_dns"] = reverse_dns
                 self.db.set_ip_info(ip, data)
         except (socket.gaierror, socket.herror, OSError):
@@ -172,7 +172,7 @@ def get_vendor_online(self, mac_addr):
         # of HTTP/1.1 204 No Content
         url = "https://api.macvendors.com"
         try:
-            response = requests.get(f"{url}/{mac_addr}", timeout=5)
+            response = requests.get(f"{url}/{mac_addr}", timeout=2)
             if response.status_code == 200:
                 # this online db returns results in an array like str [{results}],
                 # make it json
@@ -212,8 +212,8 @@ def get_vendor_offline(self, mac_addr, profileid):

     def get_vendor(self, mac_addr: str, profileid: str) -> dict:
         """
-        Returns vendor info of a MAC address either from an offline or an online
-        database
+        Returns the vendor info of a MAC address, from an offline or an
+        online database, and stores it in the slips db
         """

         if (
@@ -222,22 +222,23 @@ def get_vendor(self, mac_addr: str, profileid: str) -> dict:
         ):
             return False

-        # don't look for the vendor again if we already have it for this profileid
+        # don't look for the vendor again if we already have it for this
+        # profileid
         if self.db.get_mac_vendor_from_profile(profileid):
             return True

-        MAC_info: dict = {"MAC": mac_addr}
+        mac_info: dict = {"MAC": mac_addr}

         if vendor := self.get_vendor_offline(mac_addr, profileid):
-            MAC_info["Vendor"] = vendor
+            mac_info["Vendor"] = vendor
             self.db.set_mac_vendor_to_profile(profileid, mac_addr, vendor)
         elif vendor := self.get_vendor_online(mac_addr):
-            MAC_info["Vendor"] = vendor
+            mac_info["Vendor"] = vendor
             self.db.set_mac_vendor_to_profile(profileid, mac_addr, vendor)
         else:
-            MAC_info["Vendor"] = "Unknown"
+            mac_info["Vendor"] = "Unknown"

-        return MAC_info
+        return mac_info

     # domain info
     def get_age(self, domain):
@@ -249,7 +250,7 @@ def get_age(self, domain):
             return False

         domain_tld: str = self.whitelist.domain_analyzer.get_tld(domain)
-        if domain_tld not in whois.validTlds():
+        if domain_tld not in self.valid_tlds:
             return False

         cached_data = self.db.get_domain_data(domain)
@@ -259,12 +260,14 @@ def get_age(self, domain):

         # whois library doesn't only raise an exception, it prints the error!
         # the errors are the same exceptions we're handling
-        # temorarily change stdout to /dev/null
+        # so temporarily change stdout to /dev/null
         with open("/dev/null", "w") as f:
             with redirect_stdout(f) and redirect_stderr(f):
                 # get registration date
                 try:
-                    creation_date = whois.query(domain).creation_date
+                    creation_date = whois.query(
+                        domain, timeout=2.0
+                    ).creation_date
                 except Exception:
                     return False
@@ -278,13 +281,14 @@ def get_age(self, domain):
         self.db.set_info_for_domains(domain, {"Age": age})
         return age

-    def shutdown_gracefully(self):
+    async def shutdown_gracefully(self):
         if hasattr(self, "asn_db"):
             self.asn_db.close()
         if hasattr(self, "country_db"):
             self.country_db.close()
         if hasattr(self, "mac_db"):
             self.mac_db.close()
+        await self.reading_mac_db_task

     # GW
     def get_gateway_ip(self):
@@ -359,7 +363,8 @@ def get_gateway_mac(self, gw_ip: str):
             for line in arp_output.split("\n"):
                 fields = line.split()
                 gw_ip_from_arp_cmd = fields[1].strip("()")
-                # Match the gw_ip in the output with the one given to this function
+                # Match the gw_ip in the output with the one given to
+                # this function
                 if len(fields) >= 2 and gw_ip_from_arp_cmd == gw_ip:
                     gw_mac = fields[-4]
                     self.db.set_default_gateway("MAC", gw_mac)
@@ -370,26 +375,35 @@
         return gw_mac

-    def check_if_we_have_pending_mac_queries(self):
+    def check_if_we_have_pending_offline_mac_queries(self):
         """
-        Checks if we have pending queries in pending_mac_queries queue, and asks the db for them IF
-        update manager is done updating the mac db
+        Checks if we have pending MAC queries to get the vendor of.
+        These pending queries are MACs that should be looked up in the
+        locally downloaded mac db, but aren't because the update manager
+        hasn't downloaded it yet for whatever reason.
+        Queries are taken from the pending_mac_queries queue.
         """
-        if hasattr(self, "mac_db") and not self.pending_mac_queries.empty():
-            while True:
-                try:
-                    mac, profileid = self.pending_mac_queries.get(timeout=0.5)
-                    self.get_vendor(mac, profileid)
+        if not hasattr(self, "mac_db"):
+            return

-                except Exception:
-                    # queue is empty
-                    return
+        if self.pending_mac_queries.empty():
+            return
+
+        while True:
+            try:
+                mac, profileid = self.pending_mac_queries.get(timeout=0.5)
+                if vendor := self.get_vendor_offline(mac, profileid):
+                    self.db.set_mac_vendor_to_profile(profileid, mac, vendor)
+            except Exception:
+                # queue is empty
+                return

     def wait_for_dbs(self):
         """
-        wait for update manager to finish updating the mac db and open the rest of dbs before starting this module
+        wait for update manager to finish updating the mac db and open the
+        rest of dbs before starting this module
         """
-        # this is the loop that controls te running on open_dbs
+        # this is the loop that controls the tasks running in open_dbs
         loop = asyncio.new_event_loop()
         asyncio.set_event_loop(loop)
         # run open_dbs in the background so we don't have
@@ -465,7 +479,7 @@ def pre_main(self):
         if ip := self.get_gateway_ip():
             self.db.set_default_gateway("IP", ip)

-    def handle_new_ip(self, ip):
+    def handle_new_ip(self, ip: str):
         try:
             # make sure its a valid ip
             ip_addr = ipaddress.ip_address(ip)
@@ -473,37 +487,37 @@ def handle_new_ip(self, ip):
             # not a valid ip skip
             return

-        if not ip_addr.is_multicast:
-            # Do we have cached info about this ip in redis?
-            # If yes, load it
-            cached_ip_info = self.db.get_ip_info(ip)
-            if not cached_ip_info:
-                cached_ip_info = {}
-
-            # ------ GeoCountry -------
-            # Get the geocountry
-            if cached_ip_info == {} or "geocountry" not in cached_ip_info:
-                self.get_geocountry(ip)
-
-            # ------ ASN -------
-            # Get the ASN
-            # only update the ASN for this IP if more than 1 month
-            # passed since last ASN update on this IP
-            if self.asn.update_asn(cached_ip_info, self.update_period):
-                self.asn.get_asn(ip, cached_ip_info)
-            self.get_rdns(ip)
-
-    def main(self):
+        if ip_addr.is_multicast:
+            return
+
+        # Do we have cached info about this ip in redis?
+        # If yes, load it
+        cached_ip_info = self.db.get_ip_info(ip)
+        if not cached_ip_info:
+            cached_ip_info = {}
+
+        # Get the geocountry
+        if cached_ip_info == {} or "geocountry" not in cached_ip_info:
+            self.get_geocountry(ip)
+
+        # only update the ASN for this IP if more than 1 month
+        # passed since last ASN update on this IP
+        if self.asn.should_update_asn(cached_ip_info):
+            self.asn.get_asn(ip, cached_ip_info)
+
+        self.get_rdns(ip)
+
+    async def main(self):
         if msg := self.get_msg("new_MAC"):
             data = json.loads(msg["data"])
             mac_addr: str = data["MAC"]
             profileid: str = data["profileid"]
             self.get_vendor(mac_addr, profileid)
-            self.check_if_we_have_pending_mac_queries()
+            self.check_if_we_have_pending_offline_mac_queries()

         # set the gw mac and ip if they're not set yet
         if not self.is_gw_mac_set:
-            # whether we found the gw ip using dhcp in profileprocess
+            # whether we found the gw ip using dhcp in profiler
             # or using ip route using self.get_gateway_ip()
             # now that it's found, get and store the mac addr of it
             if ip := self.db.get_gateway_ip():
diff --git a/slips/main.py b/slips/main.py
index 04e3aa3e0..01e9681e9 100644
--- a/slips/main.py
+++ b/slips/main.py
@@ -11,6 +11,7 @@
 from datetime import datetime
 from distutils.dir_util import copy_tree
 from typing import Set
+import logging

 from managers.host_ip_manager import HostIPManager
 from managers.metadata_manager import MetadataManager
@@ -25,6 +26,9 @@
 from slips_files.core.database.database_manager import DBManager
 from slips_files.core.helpers.checker import Checker

+
+logging.basicConfig(level=logging.WARNING)
+
 DAEMONIZED_MODE = "daemonized"
@@ -70,7 +74,6 @@ def __init__(self, testing=False):
         # should be initialised after self.input_type is set
         self.host_ip_man = HostIPManager(self)

-
     def check_zeek_or_bro(self):
         """
         Check if we have zeek or bro
diff --git a/slips_files/common/abstracts/module.py b/slips_files/common/abstracts/module.py
index ceda6f65e..e561232d2 100644
--- a/slips_files/common/abstracts/module.py
+++ b/slips_files/common/abstracts/module.py
@@ -203,7 +203,8 @@ async def shutdown_gracefully(self):
     async def run_main(self):
         return await self.main()

-    def run_async_function(self, func: Callable):
+    @staticmethod
+    def run_async_function(func: Callable):
         loop = asyncio.get_event_loop()
         return loop.run_until_complete(func())
diff --git a/slips_files/core/helpers/flow_handler.py b/slips_files/core/helpers/flow_handler.py
index d02bae58a..ae14d50c1 100644
--- a/slips_files/core/helpers/flow_handler.py
+++ b/slips_files/core/helpers/flow_handler.py
@@ -37,7 +37,6 @@ def new_MAC(self, mac: str, ip: str):
         """
         if not mac or mac in ("00:00:00:00:00:00", "ff:ff:ff:ff:ff:ff"):
             return
-        # get the src and dst addresses as objects
         try:
             ip_obj = ipaddress.ip_address(ip)
             if ip_obj.is_multicast:
@@ -45,7 +44,7 @@ def new_MAC(self, mac: str, ip: str):
         except ValueError:
             return

-        # send the src and dst MAC to IP_Info module to get vendor info about this MAC
+        # send the MAC to IP_Info module to get vendor info about it
         to_send = {"MAC": mac, "profileid": f"profile_{ip}"}
         self.db.publish("new_MAC", json.dumps(to_send))
diff --git a/slips_files/core/helpers/whitelist/domain_whitelist.py b/slips_files/core/helpers/whitelist/domain_whitelist.py
index 112c6eaf3..6c931fe9e 100644
--- a/slips_files/core/helpers/whitelist/domain_whitelist.py
+++ b/slips_files/core/helpers/whitelist/domain_whitelist.py
@@ -110,5 +110,5 @@ def is_domain_in_tranco_list(self, domain):

     @staticmethod
     def get_tld(url: str):
-        """returns the top level domain from the gven url"""
+        """returns the top level domain from the given url"""
         return tldextract.extract(url).suffix
diff --git a/tests/module_factory.py b/tests/module_factory.py
index 830f4eb37..a696ceb47 100644
--- a/tests/module_factory.py
+++ b/tests/module_factory.py
@@ -45,7 +45,6 @@
 from modules.ip_info.ip_info import IPInfo
 from slips_files.common.slips_utils import utils
 from slips_files.core.helpers.whitelist.whitelist import Whitelist
-from tests.common_test_utils import do_nothing
 from modules.virustotal.virustotal import VT
 from managers.process_manager import ProcessManager
 from managers.redis_manager import RedisManager
@@ -129,6 +128,7 @@ def create_db_manager_obj(
             output_dir,
             port,
             flush_db=flush_db,
+            start_sqlite=False,
             start_redis_server=start_redis_server,
         )
         db.print = Mock()
@@ -275,11 +275,11 @@ def create_input_obj(
             termination_event=Mock(),
         )
         input.db = mock_db
-        input.mark_self_as_done_processing = do_nothing
+        input.mark_self_as_done_processing = Mock()
         input.bro_timeout = 1
         # override the print function to avoid broken pipes
         input.print = Mock()
-        input.stop_queues = do_nothing
+        input.stop_queues = Mock()
         input.testing = True
         return input
diff --git a/tests/test_asn_info.py b/tests/test_asn_info.py
index 3764347ff..2f3ee0f23 100644
--- a/tests/test_asn_info.py
+++ b/tests/test_asn_info.py
@@ -170,7 +170,8 @@ def test_get_cached_asn(ip_address, first_octet, cached_data, expected_result):
 )
 def test_update_asn(cached_data, update_period, expected_result):
     asn_info = ModuleFactory().create_asn_obj()
-    result = asn_info.update_asn(cached_data, update_period)
+    asn_info.update_period = update_period
+    result = asn_info.should_update_asn(cached_data)
     assert result == expected_result
diff --git a/tests/test_ip_info.py b/tests/test_ip_info.py
index 9a43ebcbf..143648cd0 100644
--- a/tests/test_ip_info.py
+++ b/tests/test_ip_info.py
@@ -1,9 +1,14 @@
 """Unit test for modules/ip_info/ip_info.py"""

+import asyncio
+
 from tests.module_factory import ModuleFactory
 import maxminddb
 import pytest
-from unittest.mock import Mock, patch
+from unittest.mock import (
+    Mock,
+    patch,
+)
 import json
 import requests
 import socket
@@ -366,14 +371,20 @@ def test_get_vendor_online(
     assert vendor == expected_vendor

     mock_requests.assert_called_once_with(
-        "https://api.macvendors.com/00:11:22:33:44:55", timeout=5
+        "https://api.macvendors.com/00:11:22:33:44:55", timeout=2
     )


-def test_shutdown_gracefully(
+async def tmp_function():
+    # Simulating some asynchronous work
+    await asyncio.sleep(1)
+
+
+async def test_shutdown_gracefully(
     mocker,
 ):
     ip_info = ModuleFactory().create_ip_info_obj()
+    ip_info.reading_mac_db_task = tmp_function()

     mock_asn_db = mocker.Mock()
     mock_country_db = mocker.Mock()
@@ -383,7 +394,8 @@ def test_shutdown_gracefully(
     ip_info.country_db = mock_country_db
     ip_info.mac_db = mock_mac_db

-    ip_info.shutdown_gracefully()
+    await ip_info.shutdown_gracefully()
+
     mock_asn_db.close.assert_called_once()
     mock_country_db.close.assert_called_once()
     mock_mac_db.close.assert_called_once()
@@ -446,8 +458,7 @@ def test_handle_new_ip(mocker, ip, is_multicast, cached_info, expected_calls):
     mock_get_geocountry = mocker.patch.object(ip_info, "get_geocountry")
     mock_get_asn = mocker.patch.object(ip_info.asn, "get_asn")
     mock_get_rdns = mocker.patch.object(ip_info, "get_rdns")
-
-    mocker.patch.object(ip_info.asn, "update_asn", return_value=True)
+    ip_info.asn.should_update_asn = Mock(return_value=True)
     ip_info.handle_new_ip(ip)
     assert mock_get_geocountry.call_count == expected_calls.get(
         "get_geocountry", 0
@@ -468,11 +479,13 @@ def test_check_if_we_have_pending_mac_queries_with_mac_db(
         ("AA:BB:CC:DD:EE:FF", "profile_2"),
         Exception("Empty queue"),
     ]
-    mock_get_vendor = mocker.patch.object(ip_info, "get_vendor")
-    ip_info.check_if_we_have_pending_mac_queries()
-    assert mock_get_vendor.call_count == 2
-    mock_get_vendor.assert_any_call("00:11:22:33:44:55", "profile_1")
-    mock_get_vendor.assert_any_call("AA:BB:CC:DD:EE:FF", "profile_2")
+    mock_get_vendor_offline = mocker.patch.object(
+        ip_info, "get_vendor_offline"
+    )
+    ip_info.check_if_we_have_pending_offline_mac_queries()
+    assert mock_get_vendor_offline.call_count == 2
+    mock_get_vendor_offline.assert_any_call("00:11:22:33:44:55", "profile_1")
+    mock_get_vendor_offline.assert_any_call("AA:BB:CC:DD:EE:FF", "profile_2")


 def test_check_if_we_have_pending_mac_queries_empty_queue(
@@ -483,7 +496,7 @@ def test_check_if_we_have_pending_mac_queries_empty_queue(
     ip_info.pending_mac_queries = Mock()
     ip_info.pending_mac_queries.empty.return_value = True
     mock_get_vendor = mocker.patch.object(ip_info, "get_vendor")
-    ip_info.check_if_we_have_pending_mac_queries()
+    ip_info.check_if_we_have_pending_offline_mac_queries()
     mock_get_vendor.assert_not_called()
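
Reviewer note: the asn_info.py refactor above moves the ASN refresh period off the caller and onto the ASN object itself, so update_asn(cached_data, update_period) becomes should_update_asn(cached_data) with self.update_period set once in __init__. Below is a minimal, self-contained sketch of that staleness check for anyone unfamiliar with the pattern; only the control flow and the one-month constant are taken from the diff, while the CachedASNChecker name and the demo data are illustrative and not part of this patch.

import time

ONE_MONTH = 2592000  # seconds; same value as self.update_period in asn_info.py


class CachedASNChecker:
    """Illustrative stand-in for the ASN class in modules/ip_info/asn_info.py."""

    def __init__(self, update_period: int = ONE_MONTH):
        # the refresh period is owned by the object, not passed per call
        self.update_period = update_period

    def should_update_asn(self, cached_data) -> bool:
        # True when there's no cached info, no timestamp,
        # or the cached record is older than update_period
        try:
            return (
                time.time() - cached_data["asn"]["timestamp"]
            ) > self.update_period
        except (KeyError, TypeError):
            return True


if __name__ == "__main__":
    checker = CachedASNChecker()
    fresh = {"asn": {"timestamp": time.time()}}
    stale = {"asn": {"timestamp": time.time() - 2 * ONE_MONTH}}
    assert checker.should_update_asn(None)       # nothing cached at all
    assert checker.should_update_asn({})         # no "asn" key cached
    assert not checker.should_update_asn(fresh)  # refreshed recently
    assert checker.should_update_asn(stale)      # older than one month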