diff --git a/src/lighthouseweb3/__init__.py b/src/lighthouseweb3/__init__.py index b1d8d7c..f0562ee 100644 --- a/src/lighthouseweb3/__init__.py +++ b/src/lighthouseweb3/__init__.py @@ -2,6 +2,7 @@ import os import io +from typing import List, Dict, Any from .functions import ( upload as d, deal_status, @@ -16,7 +17,11 @@ remove_ipns_record as removeIpnsRecord, create_wallet as createWallet ) - +from .functions.kavach import ( + generate, + recover_key as recoverKey, + shard_key as shardKey +) class Lighthouse: def __init__(self, token: str = ""): @@ -224,3 +229,52 @@ def getTagged(self, tag: str): except Exception as e: raise e +class Kavach: + """ + Kavach is a simple library for generating and managing secrets. + + It uses Shamir's Secret Sharing algorithm to split a secret into multiple shares. + """ + + @staticmethod + def generate(threshold: int, keyCount: int) -> List[Dict[str, Any]]: + """ + Generates a set of key shards with a given threshold and key count. + + :param threshold: int, The minimum number of shards required to recover the key. + :param keyCount: int, The number of shards to generate. + :return: List[Dict[str, Any]], A list of key shards. + """ + try: + return generate.generate(threshold, keyCount) + except Exception as e: + raise e + + + @staticmethod + def recoverKey(keyShards: List[Dict[str, Any]]) -> int: + """ + Recovers a key from a set of key shards. + + :param keyShards: List[Dict[str, Any]], A list of key shards. + :return: int, The recovered key. + """ + try: + return recoverKey.recover_key(keyShards) + except Exception as e: + raise e + + @staticmethod + def shardKey(masterKey: int, threshold: int, keyCount: int) -> List[Dict[str, Any]]: + """ + Splits a master key into multiple shards. + + :param masterKey: int, The master key to be split. + :param threshold: int, The minimum number of shards required to recover the key. + :param keyCount: int, The number of shards to generate. + :return: List[Dict[str, Any]], A list of key shards. 
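A usage sketch for the wrapper above (editor's illustration, not part of the patch). The static methods hand back the coroutines produced by the underlying async kavach functions, so callers await them; the awaited results are dicts: generate() gives {"masterKey", "keyShards"}, recoverKey() gives {"masterKey", "error"}, and shardKey() gives {"isShardable", "keyShards"} (see the function modules below). The `lighthouseweb3` import path assumes an installed package; the tests in this patch import `src.lighthouseweb3` from the repo root instead.

```python
# Editor's sketch - not part of the patch. Assumes the package is importable
# as `lighthouseweb3`; inside this repo the tests use `src.lighthouseweb3`.
import asyncio
from lighthouseweb3 import Kavach

async def demo():
    # Kavach.generate returns a coroutine; awaiting it yields
    # {"masterKey": "0x...", "keyShards": [{"key": ..., "index": ...}, ...]}
    generated = await Kavach.generate(threshold=2, keyCount=3)
    shards = generated["keyShards"]

    # Any `threshold` shards are enough to rebuild the master key.
    recovered = await Kavach.recoverKey(shards[:2])
    assert recovered["error"] is None
    assert recovered["masterKey"] == generated["masterKey"]

    # An existing 32-byte hex key can be re-sharded directly.
    resharded = await Kavach.shardKey(generated["masterKey"], threshold=3, keyCount=5)
    assert resharded["isShardable"]

if __name__ == "__main__":
    asyncio.run(demo())
```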
+ """ + try: + return shardKey.shard_key(masterKey, threshold, keyCount) + except Exception as e: + raise e \ No newline at end of file diff --git a/src/lighthouseweb3/functions/kavach/__init__.py b/src/lighthouseweb3/functions/kavach/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/lighthouseweb3/functions/kavach/config.py b/src/lighthouseweb3/functions/kavach/config.py new file mode 100644 index 0000000..bcc79dc --- /dev/null +++ b/src/lighthouseweb3/functions/kavach/config.py @@ -0,0 +1,2 @@ +#A 257-bit prime to accommodate 256-bit secrets +PRIME = 2**256 + 297 \ No newline at end of file diff --git a/src/lighthouseweb3/functions/kavach/generate.py b/src/lighthouseweb3/functions/kavach/generate.py new file mode 100644 index 0000000..bc700e8 --- /dev/null +++ b/src/lighthouseweb3/functions/kavach/generate.py @@ -0,0 +1,45 @@ +import secrets +import logging +from typing import Dict, List, Any +from .shard_key import shard_key + +logger = logging.getLogger(__name__) + +async def generate(threshold: int = 3, key_count: int = 5) -> Dict[str, Any]: + """ + Generate threshold cryptography key shards using Shamir's Secret Sharing + + Args: + threshold: Minimum number of shards needed to reconstruct the secret + key_count: Total number of key shards to generate + + Returns: + { + "masterKey": "", + "keyShards": [ + { + "key": "", + "index": "" + } + ] + } + """ + logger.info(f"Generating key shards with threshold={threshold}, key_count={key_count}") + + try: + random_int = secrets.randbits(256) + master_key = f"0x{random_int:064x}" + + result = await shard_key(master_key, threshold, key_count) + + if not result['isShardable']: + raise ValueError(result['error']) + + return { + "masterKey": master_key, + "keyShards": result['keyShards'] + } + + except Exception as e: + logger.error(f"Error during key generation: {str(e)}") + raise e diff --git a/src/lighthouseweb3/functions/kavach/recover_key.py b/src/lighthouseweb3/functions/kavach/recover_key.py new file mode 100644 index 0000000..a7970b7 --- /dev/null +++ b/src/lighthouseweb3/functions/kavach/recover_key.py @@ -0,0 +1,178 @@ +from typing import List, Dict, Any +import logging +from .config import PRIME + +logger = logging.getLogger(__name__) + +from typing import Tuple + +def extended_gcd(a: int, b: int) -> Tuple[int, int, int]: + """Extended Euclidean algorithm to find modular inverse. + + Args: + a: First integer + b: Second integer + + Returns: + A tuple (g, x, y) such that a*x + b*y = g = gcd(a, b) + """ + if a == 0: + return b, 0, 1 + else: + g, y, x = extended_gcd(b % a, a) + return g, x - (b // a) * y, y + +def modinv(a: int, m: int) -> int: + """Find the modular inverse of a mod m.""" + g, x, y = extended_gcd(a, m) + if g != 1: + raise ValueError('Modular inverse does not exist') + else: + return x % m + +def lagrange_interpolation(shares: List[Dict[str, str]], prime: int) -> int: + """ + Reconstruct the secret using Lagrange interpolation. 
+ + Args: + shares: List of dictionaries with 'key' and 'index' fields + prime: The prime number used in the finite field + + Returns: + The reconstructed secret as integer + + Raises: + ValueError: If there are duplicate indices + """ + + points = [] + seen_indices = set() + + for i, share in enumerate(shares): + try: + key_str, index_str = validate_share(share, i) + x = int(index_str, 16) + + if x in seen_indices: + raise ValueError(f"Duplicate share index found: 0x{x:x}") + seen_indices.add(x) + + y = int(key_str, 16) + points.append((x, y)) + except ValueError as e: + raise ValueError(f"Invalid share at position {i}: {e}") + + + secret = 0 + + for i, (x_i, y_i) in enumerate(points): + # Calculate the Lagrange basis polynomial L_i(0) + # Evaluate at x=0 to get the constant term + numerator = 1 + denominator = 1 + + for j, (x_j, _) in enumerate(points): + if i != j: + numerator = (numerator * (-x_j)) % prime + denominator = (denominator * (x_i - x_j)) % prime + + try: + inv_denominator = modinv(denominator, prime) + except ValueError as e: + raise ValueError(f"Error in modular inverse calculation: {e}") + + term = (y_i * numerator * inv_denominator) % prime + secret = (secret + term) % prime + + return secret + +def validate_share(share: Dict[str, str], index: int) -> Tuple[str, str]: + """Validate and normalize a single share. + + Args: + share: Dictionary containing 'key' and 'index' fields + index: Position of the share in the input list (for error messages) + + Returns: + Tuple of (normalized_key, normalized_index) as strings without '0x' prefix + + Raises: + ValueError: If the share is invalid + """ + if not isinstance(share, dict): + raise ValueError(f"Share at index {index} must be a dictionary") + + if 'key' not in share or 'index' not in share: + raise ValueError(f"Share at index {index} is missing required fields 'key' or 'index'") + + key_str = str(share['key']).strip().lower() + index_str = str(share['index']).strip().lower() + + if key_str.startswith('0x'): + key_str = key_str[2:] + if index_str.startswith('0x'): + index_str = index_str[2:] + + if not key_str: + raise ValueError(f"Empty key in share at index {index}") + if not index_str: + raise ValueError(f"Empty index in share at index {index}") + + if len(key_str) % 2 != 0: + key_str = '0' + key_str + + if len(index_str) % 2 != 0: + index_str = '0' + index_str + + try: + bytes.fromhex(key_str) + except ValueError: + raise ValueError(f"Invalid key format in share at index {index}: must be a valid hex string") + + try: + bytes.fromhex(index_str) + except ValueError: + raise ValueError(f"Invalid index format in share at index {index}: must be a valid hex string") + + index_int = int(index_str, 16) + if not (0 <= index_int <= 0xFFFFFFFF): + raise ValueError(f"Index out of range in share at index {index}: must be between 0 and 2^32-1") + + return key_str, index_str + +async def recover_key(keyShards: List[Dict[str, str]]) -> Dict[str, Any]: + """ + Recover the master key from a subset of key shares using Lagrange interpolation. 
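To see the interpolation doing its job without 256-bit numbers, here is a toy run (editor's illustration; the import path assumes the repo-root layout the tests use, and the small prime and coefficients are made up):

```python
# Editor's aside - toy run of lagrange_interpolation() over a small prime.
# Import path assumes running from the repo root, like the tests do.
from src.lighthouseweb3.functions.kavach.recover_key import lagrange_interpolation

prime = 7919
secret = 1234
# Degree-1 polynomial f(x) = secret + 166*x (threshold 2), sampled at x = 1, 2.
shares = [
    {"key": hex((secret + 166 * 1) % prime), "index": "0x1"},
    {"key": hex((secret + 166 * 2) % prime), "index": "0x2"},
]
# Evaluating the interpolated polynomial at x = 0 recovers the constant term.
assert lagrange_interpolation(shares, prime) == secret
```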
+ + Args: + keyShards: List of dictionaries containing 'key' and 'index' fields + + Returns: + { + "masterKey": "", + "error": "" + } + """ + logger.info(f"Attempting to recover master key from {len(keyShards)} shares") + + try: + for i, share in enumerate(keyShards): + validate_share(share, i) + secret = lagrange_interpolation(keyShards, PRIME) + master_key = f"0x{secret:064x}" + return { + "masterKey": master_key, + "error": None + } + except ValueError as e: + logger.error(f"Validation error during key recovery: {str(e)}") + return { + "masterKey": None, + "error": f"Validation error: {str(e)}" + } + except Exception as e: + logger.error(f"Error during key recovery: {str(e)}") + return { + "masterKey": None, + "error": f"Recovery error: {str(e)}" + } diff --git a/src/lighthouseweb3/functions/kavach/shard_key.py b/src/lighthouseweb3/functions/kavach/shard_key.py new file mode 100644 index 0000000..4910965 --- /dev/null +++ b/src/lighthouseweb3/functions/kavach/shard_key.py @@ -0,0 +1,111 @@ +import secrets +import logging +from typing import Dict, List, Any +from .config import PRIME + +logger = logging.getLogger(__name__) + +def evaluate_polynomial(coefficients: List[int], x: int, prime: int) -> int: + """ + Evaluate a polynomial with given coefficients at point x. + coefficients[0] is constant term (the secret), coefficients[1] is x coefficient, etc. + + Args: + coefficients: List of coefficients where coefficients[0] is the constant term + x: Point at which to evaluate the polynomial + prime: Prime number for the finite field + + Returns: + Value of polynomial at point x + """ + result = 0 + x_power = 1 # x^0 = 1 + + for coefficient in coefficients: + result = (result + coefficient * x_power) % prime + x_power = (x_power * x) % prime + + return result + +def validate_key(key: str) -> bool: + """ + Validate that the given key is a valid 32-byte (64 hex char) string. 
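For the forward direction used by shard_key() further down, a toy illustration (editor's sketch; repo-root import as in the tests, toy prime and coefficients are made up): the secret is coefficients[0], the remaining threshold - 1 coefficients are random, and each shard is the polynomial sampled at a non-zero index.

```python
# Editor's aside - how evaluate_polynomial() is used for sharding, on toy numbers.
from src.lighthouseweb3.functions.kavach.shard_key import evaluate_polynomial

prime = 7919
secret = 1234
coefficients = [secret, 166, 94]  # threshold 3 -> degree-2 polynomial

# x = 0 returns the secret itself, which is why shards never use index 0.
assert evaluate_polynomial(coefficients, 0, prime) == secret
toy_shards = [(x, evaluate_polynomial(coefficients, x, prime)) for x in (1, 2, 3)]
```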
+ """ + try: + if key.startswith('0x'): + key = key[2:] + bytes.fromhex(key) + return len(key) == 64 + except ValueError: + return False + +async def shard_key(key: str, threshold: int = 3, key_count: int = 5) -> Dict[str, Any]: + """ + Generate threshold cryptography key shards using Shamir's Secret Sharing + + Args: + key: The key to be shared + threshold: Minimum number of shards needed to reconstruct the secret + key_count: Total number of key shards to generate + + Returns: + { + "isShardable": true, + "keyShards": [ + { + "key": "", + "index": "" + } + ] + } + """ + logger.info(f"Generating key shards with threshold={threshold}, key_count={key_count}") + + if not validate_key(key): + raise ValueError("Invalid key format: must be a valid hex string") + + key_int = int(key, 16) + + try: + if threshold > key_count: + raise ValueError("key_count must be greater than or equal to threshold") + if threshold < 1 or key_count < 1: + raise ValueError("threshold and key_count must be positive integers") + + msk = [key_int] + + for i in range(threshold - 1): + random_coeff = secrets.randbelow(PRIME) + msk.append(random_coeff) + + idVec = [] + used_ids = set() + + for i in range(key_count): + while True: + id_vec = secrets.randbits(32) + + if id_vec != 0 and id_vec not in used_ids and id_vec < PRIME: + idVec.append(id_vec) + used_ids.add(id_vec) + break + + secVec = [] + for i in range(key_count): + y = evaluate_polynomial(msk, idVec[i], PRIME) + secVec.append(y) + + result = { + "isShardable": True, + "keyShards": [{"key": hex(secVec[i]), "index": hex(idVec[i])} for i in range(key_count)] + } + + except Exception as e: + logger.error(f"Error generating key shards: {str(e)}") + result = { + "isShardable": False, + "error": str(e) + } + + return result + diff --git a/tests/tests_kavach/__init__.py b/tests/tests_kavach/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/tests_kavach/test_generate.py b/tests/tests_kavach/test_generate.py new file mode 100644 index 0000000..386fe79 --- /dev/null +++ b/tests/tests_kavach/test_generate.py @@ -0,0 +1,79 @@ +import unittest +import asyncio +import logging +from src.lighthouseweb3 import Kavach + +logger = logging.getLogger(__name__) + +class TestGenerate(unittest.TestCase): + """Test cases for the generate module.""" + + def test_generate_basic(self): + """Test basic key generation with default parameters.""" + async def run_test(): + result = await Kavach.generate(threshold=2, keyCount=3) + + self.assertIn('masterKey', result) + self.assertIn('keyShards', result) + + # Check master key format (hex string with 0x prefix) + self.assertIsInstance(result['masterKey'], str) + self.assertTrue(result['masterKey'].startswith('0x')) + self.assertTrue(all(c in '0123456789abcdef' for c in result['masterKey'][2:])) + + # Check key shards + self.assertEqual(len(result['keyShards']), 3) + for shard in result['keyShards']: + self.assertIn('key', shard) + self.assertIn('index', shard) + + # Check key format (hex string with 0x prefix) + self.assertTrue(shard['key'].startswith('0x')) + self.assertTrue(all(c in '0123456789abcdef' for c in shard['key'][2:])) + + # Check index format (hex string with 0x prefix) + self.assertTrue(shard['index'].startswith('0x')) + self.assertTrue(all(c in '0123456789abcdef' for c in shard['index'][2:])) + + return result + + return asyncio.run(run_test()) + + def test_generate_custom_parameters(self): + """Test key generation with custom parameters.""" + async def run_test(): + threshold = 3 + key_count = 5 + + result 
= await Kavach.generate(threshold=threshold, keyCount=key_count) + + self.assertEqual(len(result['keyShards']), key_count) + + # Check all indices are present and unique + indices = [shard['index'] for shard in result['keyShards']] + self.assertEqual(len(set(indices)), key_count) # All unique + + # Verify all indices are valid hex strings with 0x prefix + for index in indices: + self.assertTrue(index.startswith('0x')) + self.assertTrue(all(c in '0123456789abcdef' for c in index[2:])) + + return result + + return asyncio.run(run_test()) + + def test_invalid_threshold(self): + """Test that invalid threshold raises an error.""" + async def run_test(): + with self.assertRaises(ValueError) as context: + await Kavach.generate(threshold=0, keyCount=3) + self.assertIn("must be positive integers", str(context.exception)) + + with self.assertRaises(ValueError) as context: + await Kavach.generate(threshold=4, keyCount=3) + self.assertIn("must be greater than or equal to threshold", str(context.exception)) + + return asyncio.run(run_test()) + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/tests/tests_kavach/test_recover_key.py b/tests/tests_kavach/test_recover_key.py new file mode 100644 index 0000000..3ea2d30 --- /dev/null +++ b/tests/tests_kavach/test_recover_key.py @@ -0,0 +1,145 @@ +import unittest +import asyncio +import logging +from src.lighthouseweb3 import Kavach + +logger = logging.getLogger(__name__) + +class TestRecoverKey(unittest.TestCase): + """Test cases for the recoverKey module.""" + + def test_empty_shares_list(self): + """Test that recovery fails with empty shares list.""" + async def run_test(): + result = await Kavach.recoverKey([]) + self.assertEqual(result['masterKey'], '0x0000000000000000000000000000000000000000000000000000000000000000') + self.assertIsNone(result['error']) + + return asyncio.run(run_test()) + + + def test_recover_key_with_generated_shares(self): + """Test key recovery with dynamically generated shares.""" + async def run_test(): + + threshold = 3 + key_count = 5 + gen_result = await Kavach.generate(threshold=threshold, keyCount=key_count) + master_key = gen_result['masterKey'] + + shares = gen_result['keyShards'][:threshold] + result = await Kavach.recoverKey(shares) + self.assertEqual(result['masterKey'], master_key) + self.assertIsNone(result['error']) + + for i in range(key_count - threshold + 1): + subset = gen_result['keyShards'][i:i+threshold] + result = await Kavach.recoverKey(subset) + self.assertEqual(result['masterKey'], master_key) + self.assertIsNone(result['error']) + + return result + + return asyncio.run(run_test()) + + def test_recover_key_insufficient_shares(self): + """Test with minimum threshold shares""" + async def run_test(): + threshold = 2 + key_count = 5 + gen_result = await Kavach.generate(threshold=threshold, keyCount=key_count) + master_key = gen_result['masterKey'] + shares = gen_result['keyShards'][:threshold] + result = await Kavach.recoverKey(shares) + self.assertEqual(result['masterKey'], master_key) + self.assertIsNone(result['error']) + + result = await Kavach.recoverKey(gen_result['keyShards']) + self.assertEqual(result['masterKey'], master_key) + self.assertIsNone(result['error']) + + return asyncio.run(run_test()) + + def test_insufficient_shares(self): + """Test with insufficient shares for recovery""" + async def run_test(): + threshold = 3 + key_count = 5 + gen_result = await Kavach.generate(threshold=threshold, keyCount=key_count) + + # Test with one less than threshold (should still work as 
long as we have at least 2 shares) + result = await Kavach.recoverKey(gen_result['keyShards'][:threshold-1]) + self.assertIsNotNone(result['masterKey']) + self.assertIsNone(result['error']) + + # Test with single share (should still work as long as we have at least 1 share) + result = await Kavach.recoverKey(gen_result['keyShards'][:1]) + self.assertIsNotNone(result['masterKey']) + self.assertIsNone(result['error']) + + return asyncio.run(run_test()) + + def test_various_threshold_combinations(self): + """Test recovery with various threshold and share count combinations""" + async def run_test(): + test_cases = [ + (2, 3), + (3, 5), + (4, 7), + (3, 10), + ] + for threshold, total in test_cases: + with self.subTest(threshold=threshold, total=total): + gen_result = await Kavach.generate( + threshold=threshold, + keyCount=total + ) + master_key = gen_result['masterKey'] + + shares = gen_result['keyShards'][:threshold] + result = await Kavach.recoverKey(shares) + self.assertEqual(result['masterKey'], master_key) + self.assertIsNone(result['error']) + + result = await Kavach.recoverKey(gen_result['keyShards']) + self.assertEqual(result['masterKey'], master_key) + self.assertIsNone(result['error']) + + import random + subset = random.sample(gen_result['keyShards'], threshold + 1) + result = await Kavach.recoverKey(subset) + self.assertEqual(result['masterKey'], master_key) + self.assertIsNone(result['error']) + + return asyncio.run(run_test()) + + + def test_invalid_share_format(self): + """Test that invalid share formats are handled correctly.""" + async def run_test(): + result = await Kavach.recoverKey(["not a dict", "another invalid"]) + self.assertIsNone(result['masterKey']) + self.assertIn("must be a dictionary", result['error']) + + result = await Kavach.recoverKey([{'key': '123'}, {'key': '456'}]) + self.assertIsNone(result['masterKey']) + self.assertIn("missing required fields 'key' or 'index'", result['error'].lower()) + + result = await Kavach.recoverKey([ + {'key': 'invalidhex', 'index': '1'}, + {'key': 'invalidhex2', 'index': '2'} + ]) + self.assertIsNone(result['masterKey']) + self.assertIn("invalid key format", result['error'].lower()) + + result = await Kavach.recoverKey([ + {'key': 'a' * 63, 'index': 'invalidindex'}, + {'key': 'b' * 63, 'index': 'invalidindex2'} + ]) + self.assertIsNone(result['masterKey']) + self.assertIn("invalid index format", result['error'].lower()) + + return asyncio.run(run_test()) + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/tests/tests_kavach/test_shard_key.py b/tests/tests_kavach/test_shard_key.py new file mode 100644 index 0000000..c3aa209 --- /dev/null +++ b/tests/tests_kavach/test_shard_key.py @@ -0,0 +1,156 @@ +import unittest +import asyncio +import logging +from src.lighthouseweb3 import Kavach + +logger = logging.getLogger(__name__) + +class TestShardKey(unittest.TestCase): + """Test cases for the shardKey function.""" + + def test_shardKey_valid_32_byte_key(self): + """Test shardKey with valid 32-byte keys.""" + async def run_test(): + valid_key = "1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef" + result = await Kavach.shardKey(valid_key, threshold=2, keyCount=3) + + self.assertTrue(result['isShardable']) + self.assertIn('keyShards', result) + self.assertEqual(len(result['keyShards']), 3) + + for shard in result['keyShards']: + self.assertIn('key', shard) + self.assertIn('index', shard) + self.assertTrue(shard['key'].startswith('0x')) + self.assertTrue(shard['index'].startswith('0x')) + 
self.assertTrue(all(c in '0123456789abcdef' for c in shard['key'][2:])) + self.assertTrue(all(c in '0123456789abcdef' for c in shard['index'][2:])) + + valid_key_with_prefix = "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef" + result2 = await Kavach.shardKey(valid_key_with_prefix, threshold=2, keyCount=3) + + self.assertTrue(result2['isShardable']) + self.assertEqual(len(result2['keyShards']), 3) + + return result + + return asyncio.run(run_test()) + + def test_shardKey_invalid_keys(self): + """Test shardKey with invalid keys.""" + async def run_test(): + short_key = "1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcd" + with self.assertRaises(ValueError) as context: + await Kavach.shardKey(short_key, threshold=2, keyCount=3) + self.assertIn("Invalid key format", str(context.exception)) + + long_key = "1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef12" + with self.assertRaises(ValueError) as context: + await Kavach.shardKey(long_key, threshold=2, keyCount=3) + self.assertIn("Invalid key format", str(context.exception)) + + malformed_key = "1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdefg" + with self.assertRaises(ValueError) as context: + await Kavach.shardKey(malformed_key, threshold=2, keyCount=3) + self.assertIn("Invalid key format", str(context.exception)) + + with self.assertRaises(ValueError) as context: + await Kavach.shardKey("", threshold=2, keyCount=3) + self.assertIn("Invalid key format", str(context.exception)) + + invalid_hex = "xyz4567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef" + with self.assertRaises(ValueError) as context: + await Kavach.shardKey(invalid_hex, threshold=2, keyCount=3) + self.assertIn("Invalid key format", str(context.exception)) + + return asyncio.run(run_test()) + + def test_shardKey_threshold_keyCount_combinations(self): + """Test various threshold and keyCount combinations.""" + async def run_test(): + valid_key = "1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef" + + result1 = await Kavach.shardKey(valid_key, threshold=1, keyCount=1) + self.assertTrue(result1['isShardable']) + self.assertEqual(len(result1['keyShards']), 1) + + result2 = await Kavach.shardKey(valid_key, threshold=2, keyCount=3) + self.assertTrue(result2['isShardable']) + self.assertEqual(len(result2['keyShards']), 3) + + result3 = await Kavach.shardKey(valid_key, threshold=3, keyCount=5) + self.assertTrue(result3['isShardable']) + self.assertEqual(len(result3['keyShards']), 5) + + result4 = await Kavach.shardKey(valid_key, threshold=4, keyCount=4) + self.assertTrue(result4['isShardable']) + self.assertEqual(len(result4['keyShards']), 4) + + result5 = await Kavach.shardKey(valid_key, threshold=5, keyCount=10) + self.assertTrue(result5['isShardable']) + self.assertEqual(len(result5['keyShards']), 10) + + indices = [shard['index'] for shard in result5['keyShards']] + self.assertEqual(len(set(indices)), 10) + + return result5 + + return asyncio.run(run_test()) + + + def test_shardKey_index_uniqueness(self): + """Test that all generated indices are unique and non-zero.""" + async def run_test(): + valid_key = "1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef" + + + result = await Kavach.shardKey(valid_key, threshold=3, keyCount=20) + + self.assertTrue(result['isShardable']) + self.assertEqual(len(result['keyShards']), 20) + + indices = [shard['index'] for shard in result['keyShards']] + self.assertEqual(len(set(indices)), 20) + + for index in indices: + self.assertNotEqual(index, 
'0x0')
+
+                self.assertNotEqual(int(index, 16), 0)
+
+            return result
+
+        return asyncio.run(run_test())
+
+    def test_shardKey_hex_format_consistency(self):
+        """Test that all returned values are properly formatted hex strings."""
+        async def run_test():
+            valid_key = "1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"
+
+            result = await Kavach.shardKey(valid_key, threshold=2, keyCount=4)
+
+            self.assertTrue(result['isShardable'])
+
+            for shard in result['keyShards']:
+                key = shard['key']
+                index = shard['index']
+
+
+                self.assertTrue(key.startswith('0x'))
+                self.assertTrue(index.startswith('0x'))
+
+
+                self.assertTrue(all(c in '0123456789abcdef' for c in key[2:]))
+                self.assertTrue(all(c in '0123456789abcdef' for c in index[2:]))
+
+                try:
+                    int(key, 16)
+                    int(index, 16)
+                except ValueError:
+                    self.fail(f"Invalid hex format: key={key}, index={index}")
+
+            return result
+
+        return asyncio.run(run_test())
+
+if __name__ == '__main__':
+    unittest.main(verbosity=2)
\ No newline at end of file
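One behaviour worth keeping in mind when reading test_insufficient_shares above: with fewer than threshold shares, recoverKey() still interpolates a value and reports no error, but that value is not the master key (a sub-threshold set reveals nothing about it by design). A sketch, using the same repo-root import convention as the tests:

```python
# Editor's sketch - not part of the patch. Below the threshold, recovery returns
# *a* key with no error, but it does not match the master key.
import asyncio
from src.lighthouseweb3 import Kavach

async def sub_threshold_demo():
    gen = await Kavach.generate(threshold=3, keyCount=5)

    partial = await Kavach.recoverKey(gen["keyShards"][:2])  # one share short
    assert partial["error"] is None
    assert partial["masterKey"] != gen["masterKey"]          # wrong key, silently

    full = await Kavach.recoverKey(gen["keyShards"][:3])     # threshold reached
    assert full["masterKey"] == gen["masterKey"]

if __name__ == "__main__":
    asyncio.run(sub_threshold_demo())
```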