60 changes: 32 additions & 28 deletions .github/workflows/unit_tests.yml
@@ -128,13 +128,6 @@ jobs:
${{ matrix.install }}
sudo apt install -y ninja-build python3-venv python3-pip

- name: Install python requirements for string catalog
run: |
python3 -m venv ${{github.workspace}}/test_venv
source ${{github.workspace}}/test_venv/bin/activate
pip install -r ${{github.workspace}}/tools/requirements.txt
echo "${{github.workspace}}/test_venv/bin" >> $GITHUB_PATH

- name: Restore CPM cache
env:
cache-name: cpm-cache-0
@@ -161,6 +154,14 @@ jobs:
path: ~/cpm-cache
key: ${{runner.os}}-${{env.cache-name}}-${{ hashFiles('**/CMakeLists.txt', 'cmake/**') }}

- name: Install python requirements
run: |
python3 -m venv ${{github.workspace}}/test_venv
source ${{github.workspace}}/test_venv/bin/activate
pip install -r ${{github.workspace}}/tools/requirements.txt
pip install -r ${{github.workspace}}/requirements.txt
echo "${{github.workspace}}/test_venv/bin" >> $GITHUB_PATH

- name: Build Unit Tests
run: cmake --build ${{github.workspace}}/build --config ${{matrix.build_type}} -v -t build_unit_tests

@@ -214,13 +215,6 @@ jobs:
${{ matrix.install }}
sudo apt install -y ninja-build python3-venv python3-pip

- name: Install python requirements for string catalog
run: |
python3 -m venv ${{github.workspace}}/test_venv
source ${{github.workspace}}/test_venv/bin/activate
pip install -r ${{github.workspace}}/tools/requirements.txt
echo "${{github.workspace}}/test_venv/bin" >> $GITHUB_PATH

- name: Restore CPM cache
env:
cache-name: cpm-cache-0
@@ -247,6 +241,14 @@ jobs:
path: ~/cpm-cache
key: ${{runner.os}}-${{env.cache-name}}-${{ hashFiles('**/CMakeLists.txt', 'cmake/**') }}

- name: Install python requirements
run: |
python3 -m venv ${{github.workspace}}/test_venv
source ${{github.workspace}}/test_venv/bin/activate
pip install -r ${{github.workspace}}/tools/requirements.txt
pip install -r ${{github.workspace}}/requirements.txt
echo "${{github.workspace}}/test_venv/bin" >> $GITHUB_PATH

- name: Build Unit Tests
run: cmake --build ${{github.workspace}}/build --config ${{matrix.build_type}} -v -t build_unit_tests

@@ -341,13 +343,6 @@ jobs:
${{ matrix.install }}
sudo apt install -y ninja-build python3-venv python3-pip

- name: Install python requirements for string catalog
run: |
python3 -m venv ${{github.workspace}}/test_venv
source ${{github.workspace}}/test_venv/bin/activate
pip install -r ${{github.workspace}}/tools/requirements.txt
echo "${{github.workspace}}/test_venv/bin" >> $GITHUB_PATH

- name: Restore CPM cache
env:
cache-name: cpm-cache-0
@@ -382,6 +377,14 @@ jobs:
# using leading to random crashes: https://reviews.llvm.org/D148280
run: sudo sysctl vm.mmap_rnd_bits=28

- name: Install python requirements
run: |
python3 -m venv ${{github.workspace}}/test_venv
source ${{github.workspace}}/test_venv/bin/activate
pip install -r ${{github.workspace}}/tools/requirements.txt
pip install -r ${{github.workspace}}/requirements.txt
echo "${{github.workspace}}/test_venv/bin" >> $GITHUB_PATH

- name: Build Unit Tests
run: cmake --build ${{github.workspace}}/build -t unit_tests

@@ -394,13 +397,6 @@ jobs:
run: |
sudo apt update && sudo apt install -y gcc-${{env.DEFAULT_GCC_VERSION}} g++-${{env.DEFAULT_GCC_VERSION}} ninja-build python3-venv python3-pip valgrind

- name: Install python requirements for string catalog
run: |
python3 -m venv ${{github.workspace}}/test_venv
source ${{github.workspace}}/test_venv/bin/activate
pip install -r ${{github.workspace}}/tools/requirements.txt
echo "${{github.workspace}}/test_venv/bin" >> $GITHUB_PATH

- name: Restore CPM cache
env:
cache-name: cpm-cache-0
@@ -427,6 +423,14 @@ jobs:
path: ~/cpm-cache
key: ${{runner.os}}-${{env.cache-name}}-${{ hashFiles('**/CMakeLists.txt', 'cmake/**') }}

- name: Install python requirements
run: |
python3 -m venv ${{github.workspace}}/test_venv
source ${{github.workspace}}/test_venv/bin/activate
pip install -r ${{github.workspace}}/tools/requirements.txt
pip install -r ${{github.workspace}}/requirements.txt
echo "${{github.workspace}}/test_venv/bin" >> $GITHUB_PATH

- name: Build Unit Tests
run: cmake --build ${{github.workspace}}/build -t build_unit_tests

8 changes: 4 additions & 4 deletions include/log/catalog/catalog.hpp
@@ -16,7 +16,7 @@ using module_id = std::uint32_t;
template <typename> extern auto catalog() -> string_id;
template <typename> extern auto module() -> module_id;

struct encode_32;
struct encode_64;
struct encode_u32;
struct encode_u64;
template <typename> struct encode_32;
template <typename> struct encode_64;
template <typename> struct encode_u32;
template <typename> struct encode_u64;
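
A note on the catalog.hpp change above: the encode tags become class templates, so the tag itself records which argument type it encodes. A minimal sketch of what the template parameter buys, compilable on its own; the recover_arg helper is hypothetical, for illustration only:

#include <type_traits>

// Forward declarations, as in catalog.hpp after this change:
template <typename> struct encode_32;
template <typename> struct encode_u32;

// Hypothetical helper: the original argument type is now
// recoverable from the tag alone, via partial specialization.
template <typename Tag> struct recover_arg;
template <typename T> struct recover_arg<encode_32<T>> { using type = T; };
template <typename T> struct recover_arg<encode_u32<T>> { using type = T; };

static_assert(std::is_same_v<recover_arg<encode_32<char>>::type, char>);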
20 changes: 7 additions & 13 deletions include/log/catalog/mipi_builder.hpp
@@ -16,39 +16,33 @@

namespace logging::mipi {
template <typename T>
concept signed_packable =
std::signed_integral<T> and sizeof(T) <= sizeof(std::int64_t);
concept signed_packable = std::signed_integral<stdx::underlying_type_t<T>> and
sizeof(T) <= sizeof(std::int64_t);

template <typename T>
concept unsigned_packable =
std::unsigned_integral<T> and sizeof(T) <= sizeof(std::int64_t);
std::unsigned_integral<stdx::underlying_type_t<T>> and
sizeof(T) <= sizeof(std::int64_t);

template <typename T>
concept enum_packable = std::is_enum_v<T> and sizeof(T) <= sizeof(std::int32_t);

template <typename T>
concept packable =
signed_packable<T> or unsigned_packable<T> or enum_packable<T>;
concept packable = signed_packable<T> or unsigned_packable<T>;

template <typename T> struct encoding;

template <signed_packable T> struct encoding<T> {
using encode_t = stdx::conditional_t<sizeof(T) <= sizeof(std::int32_t),
encode_32, encode_64>;
encode_32<T>, encode_64<T>>;
using pack_t = stdx::conditional_t<sizeof(T) <= sizeof(std::int32_t),
std::int32_t, std::int64_t>;
};

template <unsigned_packable T> struct encoding<T> {
using encode_t = stdx::conditional_t<sizeof(T) <= sizeof(std::int32_t),
encode_u32, encode_u64>;
encode_u32<T>, encode_u64<T>>;
using pack_t = stdx::conditional_t<sizeof(T) <= sizeof(std::uint32_t),
std::uint32_t, std::uint64_t>;
};

template <enum_packable T>
struct encoding<T> : encoding<stdx::underlying_type_t<T>> {};

template <packable T> using pack_as_t = typename encoding<T>::pack_t;
template <packable T> using encode_as_t = typename encoding<T>::encode_t;

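A note on the concept rework above: applying stdx::underlying_type_t inside signed_packable and unsigned_packable lets enums satisfy these concepts directly, which is why the separate enum_packable concept and its encoding specialization can be deleted. A self-contained sketch, assuming stdx::underlying_type_t is the identity on non-enum types (consistent with its use on plain integral types above); the underlying_t stand-in and the level enum are illustrative, not library code:

#include <concepts>
#include <cstdint>
#include <type_traits>

// Stand-in for stdx::underlying_type_t (assumption: identity for
// non-enum types, std::underlying_type_t for enums):
template <typename T, bool = std::is_enum_v<T>>
struct underlying { using type = T; };
template <typename T>
struct underlying<T, true> { using type = std::underlying_type_t<T>; };
template <typename T> using underlying_t = typename underlying<T>::type;

// Mirror of the reworked unsigned_packable:
template <typename T>
concept unsigned_packable_sketch =
    std::unsigned_integral<underlying_t<T>> and
    sizeof(T) <= sizeof(std::int64_t);

enum class level : std::uint8_t { info, warn, error };

// The enum satisfies the concept directly; no enum_packable branch needed.
static_assert(unsigned_packable_sketch<level>);
static_assert(unsigned_packable_sketch<std::uint32_t>);
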
21 changes: 11 additions & 10 deletions test/log/encoder.cpp
@@ -146,17 +146,18 @@ TEST_CASE("argument packing", "[mipi]") {
}

TEST_CASE("argument encoding", "[mipi]") {
static_assert(std::same_as<logging::mipi::encode_as_t<std::int32_t>,
encode_32<std::int32_t>>);
static_assert(std::same_as<logging::mipi::encode_as_t<std::uint32_t>,
encode_u32<std::uint32_t>>);
static_assert(std::same_as<logging::mipi::encode_as_t<std::int64_t>,
encode_64<std::int64_t>>);
static_assert(std::same_as<logging::mipi::encode_as_t<std::uint64_t>,
encode_u64<std::uint64_t>>);
static_assert(
std::same_as<logging::mipi::encode_as_t<std::int32_t>, encode_32>);
static_assert(
std::same_as<logging::mipi::encode_as_t<std::uint32_t>, encode_u32>);
static_assert(
std::same_as<logging::mipi::encode_as_t<std::int64_t>, encode_64>);
static_assert(
std::same_as<logging::mipi::encode_as_t<std::uint64_t>, encode_u64>);
static_assert(std::same_as<logging::mipi::encode_as_t<char>, encode_32>);
static_assert(
std::same_as<logging::mipi::encode_as_t<unsigned char>, encode_u32>);
std::same_as<logging::mipi::encode_as_t<char>, encode_32<char>>);
static_assert(std::same_as<logging::mipi::encode_as_t<unsigned char>,
encode_u32<unsigned char>>);
}

TEST_CASE("log zero arguments", "[mipi]") {
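Beyond the integral cases asserted above, the effect of the rework shows with an enum argument: the encode tag now preserves the enum's identity while the packed representation stays 32-bit. An illustrative snippet, assuming the repo's include layout; the level enum is hypothetical:

#include <log/catalog/mipi_builder.hpp>

#include <concepts>
#include <cstdint>

enum class level : std::uint8_t { info, warn, error };

// level has an unsigned underlying type that fits in 32 bits, so it
// satisfies unsigned_packable and encodes as encode_u32<level>:
static_assert(std::same_as<logging::mipi::encode_as_t<level>, encode_u32<level>>);
static_assert(std::same_as<logging::mipi::pack_as_t<level>, std::uint32_t>);
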
37 changes: 21 additions & 16 deletions test/msg/gen_handler_data.py
Original file line number Diff line number Diff line change
@@ -1,35 +1,40 @@
import random


big_vals = [int(random.expovariate(10) * (1 << 28)) & 0xffffffff for i in range(0, 100)]
med_vals = [int(random.expovariate(10) * (1 << 14)) & 0xffff for i in range(0, 50)]
small_vals = [int(random.expovariate(10) * (1 << 6)) & 0xff for i in range(0, 25)]
big_vals = [int(random.expovariate(10) * (1 << 28)) & 0xFFFFFFFF for i in range(0, 100)]
med_vals = [int(random.expovariate(10) * (1 << 14)) & 0xFFFF for i in range(0, 50)]
small_vals = [int(random.expovariate(10) * (1 << 6)) & 0xFF for i in range(0, 25)]

combos = [(
random.choice(big_vals),
random.choice(med_vals),
random.choice(small_vals)
) for i in range(0, 256)]
combos = [
(random.choice(big_vals), random.choice(med_vals), random.choice(small_vals))
for i in range(0, 256)
]


print("""
print(
"""
template<typename T>
struct test_project {
constexpr static auto config = cib::config(
cib::exports<T>,
cib::extend<T>(""")
cib::extend<T>("""
)
for c in combos:
print(f" cb<{c[0]}, {c[1]}, {c[2]}>,")
print(
""" )
""" )
);
};
""")
"""
)

print("""
auto msgs = std::array{""")
print(
"""
auto msgs = std::array{"""
)
for c in combos:
print(f" m<{c[0]}, {c[1]}, {c[2]}>,")
print(
""" };
""")
""" };
"""
)
2 changes: 2 additions & 0 deletions tools/CMakeLists.txt
@@ -1 +1,3 @@
mypy_lint(FILES gen_str_catalog.py)

add_unit_test("gen_str_catalog_test" PYTEST FILES "gen_str_catalog_test.py")
60 changes: 37 additions & 23 deletions tools/benchmark/parse_bench_data.py
@@ -4,24 +4,25 @@
import csv
import re


def parse_file(file_path):
data = {}
current_dataset = None
current_algorithm = None
current_size = None

with open(file_path, 'r') as file:
with open(file_path, "r") as file:
for line in file:
line = line.strip()

if line.startswith('dataset:'):
current_dataset = line.split(':')[1].strip()
elif line.startswith('algorithm:'):
current_algorithm = line.split(':')[1].strip()
elif line.startswith('size:'):
current_size = int(line.split(':')[1].strip())
elif line.startswith('|'):
fields = line.split('|')[1:]
if line.startswith("dataset:"):
current_dataset = line.split(":")[1].strip()
elif line.startswith("algorithm:"):
current_algorithm = line.split(":")[1].strip()
elif line.startswith("size:"):
current_size = int(line.split(":")[1].strip())
elif line.startswith("|"):
fields = line.split("|")[1:]
if len(fields) >= 5:
try:
ns_op = float(fields[0].strip())
@@ -34,44 +35,57 @@ def parse_file(file_path):
data[current_dataset] = {}
if current_algorithm not in data[current_dataset]:
data[current_dataset][current_algorithm] = {
'size': current_size
"size": current_size
}

if "chained" in key_gen_type:
data[current_dataset][current_algorithm]['ns_op_chained'] = ns_op
data[current_dataset][current_algorithm][
"ns_op_chained"
] = ns_op
elif "independent" in key_gen_type:
data[current_dataset][current_algorithm]['ns_op_independent'] = ns_op

data[current_dataset][current_algorithm][
"ns_op_independent"
] = ns_op

return data


def generate_csv_tables(data, output_prefix):
datasets = list(data.keys())
algorithms = sorted(set(algo for dataset in data.values() for algo in dataset.keys()))
algorithms = sorted(
set(algo for dataset in data.values() for algo in dataset.keys())
)

# Table 1: Chained Algorithm Performance (ns/op) vs Dataset
with open(f'{output_prefix}_chained_performance.csv', 'w', newline='') as file:
with open(f"{output_prefix}_chained_performance.csv", "w", newline="") as file:
writer = csv.writer(file)
writer.writerow(['Dataset'] + algorithms)
writer.writerow(["Dataset"] + algorithms)
for dataset in datasets:
row = [dataset] + [data[dataset].get(algo, {}).get('ns_op_chained', '') for algo in algorithms]
row = [dataset] + [
data[dataset].get(algo, {}).get("ns_op_chained", "")
for algo in algorithms
]
writer.writerow(row)

# Table 2: Independent Algorithm Performance (ns/op) vs Dataset
with open(f'{output_prefix}_independent_performance.csv', 'w', newline='') as file:
with open(f"{output_prefix}_independent_performance.csv", "w", newline="") as file:
writer = csv.writer(file)
writer.writerow(['Dataset'] + algorithms)
writer.writerow(["Dataset"] + algorithms)
for dataset in datasets:
row = [dataset] + [data[dataset].get(algo, {}).get('ns_op_independent', '') for algo in algorithms]
row = [dataset] + [
data[dataset].get(algo, {}).get("ns_op_independent", "")
for algo in algorithms
]
writer.writerow(row)

# Table 3: Algorithm Size (bytes) vs Dataset
with open(f'{output_prefix}_algorithm_size.csv', 'w', newline='') as file:
with open(f"{output_prefix}_algorithm_size.csv", "w", newline="") as file:
writer = csv.writer(file)
writer.writerow(['Dataset'] + algorithms)
writer.writerow(["Dataset"] + algorithms)
for dataset in datasets:
row = [dataset] + [data[dataset].get(algo, {}).get('size', '') for algo in algorithms]
row = [dataset] + [
data[dataset].get(algo, {}).get("size", "") for algo in algorithms
]
writer.writerow(row)

