Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/dependabot.yml
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ updates:
commit-message:
prefix: "[deps] "
- package-ecosystem: "github-actions" # Check for GitHub Actions updates
directory: "/"
directory: "/"
schedule:
interval: "monthly" # Check for updates monthly
commit-message:
Expand Down
63 changes: 31 additions & 32 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@ on:
- gsoc23

jobs:

build:
name: Python==${{ matrix.python-version }}
runs-on: ubuntu-22.04
Expand All @@ -26,42 +25,42 @@ jobs:
- "3.13"

steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha }}
- uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha }}

- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
cache: 'pip'
cache-dependency-path: |
**/requirements*.txt
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
cache: "pip"
cache-dependency-path: |
**/requirements*.txt

- name: Install Dependencies
id: deps
run: |
pip install -U pip wheel setuptools
pip install -U -r requirements-test.txt
pip install -U -e .
- name: Install Dependencies
id: deps
run: |
pip install -U pip wheel setuptools
pip install -U -r requirements-test.txt
pip install -U -e .

- name: QA checks
run: ./run-qa-checks
- name: QA checks
run: ./run-qa-checks

- name: Tests
if: ${{ !cancelled() && steps.deps.conclusion == 'success' }}
run: |
coverage run runtests.py
coverage xml
- name: Tests
if: ${{ !cancelled() && steps.deps.conclusion == 'success' }}
run: |
coverage run runtests.py
coverage xml

- name: Upload Coverage
if: ${{ success() }}
uses: coverallsapp/github-action@v2
with:
parallel: true
format: cobertura
flag-name: python-${{ matrix.env.env }}
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Upload Coverage
if: ${{ success() }}
uses: coverallsapp/github-action@v2
with:
parallel: true
format: cobertura
flag-name: python-${{ matrix.env.env }}
github-token: ${{ secrets.GITHUB_TOKEN }}

coveralls:
needs: build
Expand Down
26 changes: 13 additions & 13 deletions .github/workflows/pypi.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,16 +17,16 @@ jobs:
permissions:
id-token: write
steps:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.10'
- name: Install dependencies
run: |
pip install -U pip
pip install build
- name: Build package
run: python -m build
- name: Publish package distributions to PyPI
uses: pypa/[email protected]
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.10"
- name: Install dependencies
run: |
pip install -U pip
pip install build
- name: Build package
run: python -m build
- name: Publish package distributions to PyPI
uses: pypa/[email protected]
4 changes: 1 addition & 3 deletions README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -309,9 +309,7 @@ HTTPS example with self-signed SSL certificate using ``verify=False``:

from netdiff import NetJsonParser

OlsrParser(
url="https://myserver.mydomain.com/topology.json", verify=False
)
OlsrParser(url="https://myserver.mydomain.com/topology.json", verify=False)

NetJSON output
--------------
Expand Down
2 changes: 1 addition & 1 deletion netdiff/exceptions.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ class ConversionException(NetdiffException):
"""

def __init__(self, *args, **kwargs):
self.data = kwargs.pop('data')
self.data = kwargs.pop("data")


class ParserError(NetdiffException):
Expand Down
14 changes: 7 additions & 7 deletions netdiff/info.py
Original file line number Diff line number Diff line change
@@ -1,18 +1,18 @@
VERSION = (1, 2, 0, 'alpha')
VERSION = (1, 2, 0, "alpha")
__version__ = VERSION


def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
version = "%s.%s" % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
if VERSION[3:] == ('alpha', 0):
version = '%s pre-alpha' % version
version = "%s.%s" % (version, VERSION[2])
if VERSION[3:] == ("alpha", 0):
version = "%s pre-alpha" % version
else:
if VERSION[3] != 'final':
if VERSION[3] != "final":
try:
rev = VERSION[4]
except IndexError:
rev = 0
version = '%s%s%s' % (version, VERSION[3][0:1], rev)
version = "%s%s%s" % (version, VERSION[3][0:1], rev)
return version
16 changes: 8 additions & 8 deletions netdiff/parsers/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,8 +66,8 @@ def __init__(
data = self._get_file(file)
elif data is None and url is None and file is None:
raise ValueError(
'no topology data supplied, on of the following arguments'
'must be supplied: data, url or file'
"no topology data supplied, on of the following arguments"
"must be supplied: data, url or file"
)
self.original_data = self.to_python(data)
# avoid throwing NotImplementedError in tests
Expand All @@ -76,9 +76,9 @@ def __init__(

def _get_url(self, url):
url = urlparse.urlparse(url)
if url.scheme in ['http', 'https']:
if url.scheme in ["http", "https"]:
return self._get_http(url)
if url.scheme == 'telnet':
if url.scheme == "telnet":
return self._get_telnet(url)

def __sub__(self, other):
Expand All @@ -102,7 +102,7 @@ def to_python(self, data):
return json.loads(data)
except ValueError:
pass
raise ConversionException('Could not recognize format', data=data)
raise ConversionException("Could not recognize format", data=data)

def _get_file(self, path):
try:
Expand All @@ -118,7 +118,7 @@ def _get_http(self, url):
except Exception as e:
raise TopologyRetrievalError(e)
if response.status_code != 200:
msg = 'Expecting HTTP 200 ok, got {0}'.format(response.status_code)
msg = "Expecting HTTP 200 ok, got {0}".format(response.status_code)
raise TopologyRetrievalError(msg)
return response.content.decode()

Expand All @@ -127,8 +127,8 @@ def _get_telnet(self, url):
tn = telnetlib.Telnet(url.hostname, url.port, timeout=self.timeout)
except Exception as e:
raise TopologyRetrievalError(e)
tn.write(("\r\n").encode('ascii'))
data = tn.read_all().decode('ascii')
tn.write(("\r\n").encode("ascii"))
data = tn.read_all().decode("ascii")
tn.close()
return data

Expand Down
48 changes: 24 additions & 24 deletions netdiff/parsers/batman.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,12 +5,12 @@
class BatmanParser(BaseParser):
"""batman-adv parser"""

protocol = 'batman-adv'
version = '2015.0'
metric = 'TQ'
protocol = "batman-adv"
version = "2015.0"
metric = "TQ"

# the default expected format
_format = 'alfred_vis'
_format = "alfred_vis"

def to_python(self, data):
"""
Expand All @@ -25,20 +25,20 @@ def _txtinfo_to_python(self, data):
"""
Converts txtinfo format to python
"""
self._format = 'txtinfo'
self._format = "txtinfo"
# find interesting section
lines = data.split('\n')
lines = data.split("\n")
try:
start = lines.index('Table: Topology') + 2
start = lines.index("Table: Topology") + 2
except ValueError:
raise ParserError('Unrecognized format')
raise ParserError("Unrecognized format")
topology_lines = [line for line in lines[start:] if line]
# convert to python list
parsed_lines = []
for line in topology_lines:
values = line.split(' ')
values = line.split(" ")
parsed_lines.append(
{'source': values[0], 'target': values[1], 'cost': float(values[4])}
{"source": values[0], "target": values[1], "cost": float(values[4])}
)
return parsed_lines

Expand All @@ -59,9 +59,9 @@ def _get_aggregated_node_list(self, data):
"""
node_list = []
for node in data:
local_addresses = [node['primary']]
if 'secondary' in node:
local_addresses += node['secondary']
local_addresses = [node["primary"]]
if "secondary" in node:
local_addresses += node["secondary"]
node_list.append(local_addresses)
return node_list

Expand All @@ -72,7 +72,7 @@ def parse(self, data):
* alfred_vis
* txtinfo
"""
method = getattr(self, '_parse_{0}'.format(self._format))
method = getattr(self, "_parse_{0}".format(self._format))
return method(data)

def _parse_alfred_vis(self, data):
Expand All @@ -83,26 +83,26 @@ def _parse_alfred_vis(self, data):
"""
# initialize graph and list of aggregated nodes
graph = self._init_graph()
if 'source_version' in data:
self.version = data['source_version']
if 'vis' not in data:
if "source_version" in data:
self.version = data["source_version"]
if "vis" not in data:
raise ParserError('Parse error, "vis" key not found')
node_list = self._get_aggregated_node_list(data['vis'])
node_list = self._get_aggregated_node_list(data["vis"])

# loop over topology section and create networkx graph
for node in data["vis"]:
for neigh in node["neighbors"]:
graph.add_node(
node['primary'],
node["primary"],
**{
'local_addresses': node.get('secondary', []),
'clients': node.get('clients', []),
"local_addresses": node.get("secondary", []),
"clients": node.get("clients", []),
}
)
primary_neigh = self._get_primary_address(neigh['neighbor'], node_list)
primary_neigh = self._get_primary_address(neigh["neighbor"], node_list)
# networkx automatically ignores duplicated edges
graph.add_edge(
node['primary'], primary_neigh, weight=float(neigh['metric'])
node["primary"], primary_neigh, weight=float(neigh["metric"])
)
return graph

Expand All @@ -113,5 +113,5 @@ def _parse_txtinfo(self, data):
"""
graph = self._init_graph()
for link in data:
graph.add_edge(link['source'], link['target'], weight=link['cost'])
graph.add_edge(link["source"], link["target"], weight=link["cost"])
return graph
18 changes: 9 additions & 9 deletions netdiff/parsers/bmx6.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,9 @@
class Bmx6Parser(BaseParser):
"""Bmx6_b6m parser"""

protocol = 'BMX6_b6m'
version = '0'
metric = 'none'
protocol = "BMX6_b6m"
version = "0"
metric = "none"

def parse(self, data):
"""
Expand All @@ -22,13 +22,13 @@ def parse(self, data):
# loop over topology section and create networkx graph
# this data structure does not contain cost information, so we set it as 1
for node in data:
for link in node['links']:
cost = (link['txRate'] + link['rxRate']) / 2.0
for link in node["links"]:
cost = (link["txRate"] + link["rxRate"]) / 2.0
graph.add_edge(
node['name'],
link['name'],
node["name"],
link["name"],
weight=cost,
tx_rate=link['txRate'],
rx_rate=link['rxRate'],
tx_rate=link["txRate"],
rx_rate=link["rxRate"],
)
return graph
8 changes: 4 additions & 4 deletions netdiff/parsers/cnml.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,22 +14,22 @@
class CnmlParser(BaseParser):
"""CNML 0.1 parser"""

protocol = 'static'
protocol = "static"
version = None
metric = None

def to_python(self, data):
if isinstance(data, str):
up = urlparse.urlparse(data)
# if it looks like a file path
if os.path.isfile(data) or up.scheme in ['http', 'https']:
if os.path.isfile(data) or up.scheme in ["http", "https"]:
return libcnml.CNMLParser(data)
else:
raise ParserError('Could not decode CNML data')
raise ParserError("Could not decode CNML data")
elif isinstance(data, libcnml.CNMLParser):
return data
else:
raise ParserError('Could not find valid data to parse')
raise ParserError("Could not find valid data to parse")

def parse(self, data):
"""
Expand Down
Loading