
Commit fa5b394

add some docstrings
1 parent 8c52301 commit fa5b394

2 files changed: +106 -0 lines changed

tensorcircuit/results/counts.py

Lines changed: 99 additions & 0 deletions
@@ -12,22 +12,76 @@
 def reverse_count(count: ct) -> ct:
+    """
+    Reverse the bit string keys in a count dictionary.
+
+    :param count: A dictionary mapping bit strings to counts
+    :type count: ct
+    :return: A new dictionary with reversed bit string keys
+    :rtype: ct
+
+    :Example:
+
+    >>> reverse_count({"01": 10, "10": 20})
+    {'10': 10, '01': 20}
+    """
     ncount = {}
     for k, v in count.items():
         ncount[k[::-1]] = v
     return ncount

 def sort_count(count: ct) -> ct:
+    """
+    Sort the count dictionary by counts in descending order.
+
+    :param count: A dictionary mapping bit strings to counts
+    :type count: ct
+    :return: A new dictionary sorted by count values (descending)
+    :rtype: ct
+
+    :Example:
+
+    >>> sort_count({"00": 5, "01": 15, "10": 10})
+    {'01': 15, '10': 10, '00': 5}
+    """
     return {k: v for k, v in sorted(count.items(), key=lambda item: -item[1])}

 def normalized_count(count: ct) -> Dict[str, float]:
+    """
+    Normalize the count dictionary to represent probabilities.
+
+    :param count: A dictionary mapping bit strings to counts
+    :type count: ct
+    :return: A new dictionary with probabilities instead of counts
+    :rtype: Dict[str, float]
+
+    :Example:
+
+    >>> normalized_count({"00": 5, "01": 15})
+    {'00': 0.25, '01': 0.75}
+    """
     shots = sum([v for k, v in count.items()])
     return {k: v / shots for k, v in count.items()}
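
The three helpers above compose naturally. A quick composition sketch with a hypothetical input, following only the behavior documented in the docstrings:

from tensorcircuit.results.counts import reverse_count, sort_count, normalized_count

raw = {"10": 5, "01": 15}

# reverse the key bit order, normalize to probabilities, then rank by weight
probs = sort_count(normalized_count(reverse_count(raw)))
# expected: {'10': 0.75, '01': 0.25}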

 def marginal_count(count: ct, keep_list: Sequence[int]) -> ct:
+    """
+    Compute the marginal distribution of a count dictionary over specified qubits.
+
+    :param count: A dictionary mapping bit strings to counts
+    :type count: ct
+    :param keep_list: List of qubit indices to keep in the marginal distribution
+    :type keep_list: Sequence[int]
+    :return: A new count dictionary with marginal distribution
+    :rtype: ct
+
+    :Example:
+
+    >>> marginal_count({"001": 10, "110": 20}, [0, 2])
+    {'01': 10, '10': 20}
+    """
     import qiskit

     count = reverse_count(count)
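
The visible body delegates to qiskit after reversing the bit order; the rest of the implementation is outside this hunk. As an illustration of what the docstring example computes, here is a minimal pure-Python sketch of the same marginalization (a hypothetical helper, not the library code):

from collections import defaultdict
from typing import Dict, Sequence

def marginal_count_sketch(count: Dict[str, int], keep_list: Sequence[int]) -> Dict[str, int]:
    # keep only the bits at the requested positions and re-accumulate the counts
    marginal: Dict[str, int] = defaultdict(int)
    for bits, n in count.items():
        marginal["".join(bits[i] for i in keep_list)] += n
    return dict(marginal)

marginal_count_sketch({"001": 10, "110": 20}, [0, 2])   # {'01': 10, '10': 20}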
@@ -36,6 +90,21 @@ def marginal_count(count: ct, keep_list: Sequence[int]) -> ct:


 def count2vec(count: ct, normalization: bool = True) -> Tensor:
+    """
+    Convert count dictionary to probability vector.
+
+    :param count: A dictionary mapping bit strings to counts
+    :type count: ct
+    :param normalization: Whether to normalize the counts to probabilities, defaults to True
+    :type normalization: bool, optional
+    :return: Probability vector as numpy array
+    :rtype: Tensor
+
+    :Example:
+
+    >>> count2vec({"00": 2, "10": 3, "11": 5})
+    array([0.2, 0. , 0.3, 0.5])
+    """
     nqubit = len(list(count.keys())[0])
     probability = [0] * 2**nqubit
     shots = sum([v for k, v in count.items()])
@@ -47,6 +116,21 @@ def count2vec(count: ct, normalization: bool = True) -> Tensor:


 def vec2count(vec: Tensor, prune: bool = False) -> ct:
+    """
+    Convert probability vector to count dictionary.
+
+    :param vec: Probability vector
+    :type vec: Tensor
+    :param prune: Whether to remove near-zero probabilities, defaults to False
+    :type prune: bool, optional
+    :return: Count dictionary
+    :rtype: ct
+
+    :Example:
+
+    >>> vec2count(np.array([0.2, 0.3, 0.1, 0.4]))
+    {'00': 0.2, '01': 0.3, '10': 0.1, '11': 0.4}
+    """
     from ..quantum import count_vector2dict

     if isinstance(vec, list):
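
count2vec and vec2count are inverses of each other up to normalization. A usage sketch based directly on the docstring examples above:

import numpy as np
from tensorcircuit.results.counts import count2vec, vec2count

vec = count2vec({"00": 2, "10": 3, "11": 5})
# expected: array([0.2, 0. , 0.3, 0.5]); index i holds the weight of the bit string for i in binary

vec2count(np.array([0.2, 0.3, 0.1, 0.4]))
# expected: {'00': 0.2, '01': 0.3, '10': 0.1, '11': 0.4}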
@@ -63,6 +147,16 @@ def vec2count(vec: Tensor, prune: bool = False) -> ct:


 def kl_divergence(c1: ct, c2: ct) -> float:
+    """
+    Compute the Kullback-Leibler divergence between two count distributions.
+
+    :param c1: First count dictionary
+    :type c1: ct
+    :param c2: Second count dictionary
+    :type c2: ct
+    :return: KL divergence value
+    :rtype: float
+    """
     eps = 1e-4  # typical value for inverse of the total shots
     c1 = normalized_count(c1)  # type: ignore
     c2 = normalized_count(c2)  # type: ignore
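
The hunk ends before the accumulation step. The quantity being computed is KL(P||Q) = sum_x P(x) log(P(x)/Q(x)) over the normalized distributions, with eps serving as a floor for outcomes missing from one of the dictionaries. A minimal standalone sketch under those assumptions, not necessarily the library's exact implementation:

import math
from typing import Dict

def kl_divergence_sketch(c1: Dict[str, int], c2: Dict[str, int], eps: float = 1e-4) -> float:
    # normalize raw shot counts into probability distributions
    s1, s2 = sum(c1.values()), sum(c2.values())
    p = {k: v / s1 for k, v in c1.items()}
    q = {k: v / s2 for k, v in c2.items()}
    # sum p(x) * log(p(x) / q(x)), flooring outcomes missing from q at eps
    return sum(pv * math.log(pv / q.get(k, eps)) for k, pv in p.items() if pv > 0)

kl_divergence_sketch({"00": 90, "11": 10}, {"00": 50, "11": 50})   # ~0.368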
@@ -113,6 +207,11 @@ def merge_count(*counts: ct) -> ct:
     :type counts: ct
     :return: Merged count dictionary
     :rtype: ct
+
+    :Example:
+
+    >>> merge_count({"00": 10, "01": 20}, {"00": 5, "10": 15})
+    {'00': 15, '01': 20, '10': 15}
     """
     merged: ct = {}
     for count in counts:
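
Only the first two lines of the merge loop are visible in this hunk. A hypothetical completion of the obvious key-wise sum, named differently to make clear it is an illustration rather than the committed body:

from typing import Dict

def merge_count_sketch(*counts: Dict[str, int]) -> Dict[str, int]:
    # add up shot counts key by key across all input dictionaries
    merged: Dict[str, int] = {}
    for count in counts:
        for k, v in count.items():
            merged[k] = merged.get(k, 0) + v
    return merged

merge_count_sketch({"00": 10, "01": 20}, {"00": 5, "10": 15})   # {'00': 15, '01': 20, '10': 15}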

tensorcircuit/utils.py

Lines changed: 7 additions & 0 deletions
@@ -11,6 +11,13 @@
 def gpu_memory_share(flag: bool = True) -> None:
+    """
+    Set the GPU memory growth mode
+
+    :param flag: whether to set the GPU memory growth mode, defaults to True
+    :type flag: bool
+    :return: None
+    """
     # TODO(@refraction-ray): the default torch behavior should be True
     # preallocate behavior for torch to be investigated
     if flag is True:
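
The hunk cuts off at the flag check. For context, enabling memory growth on the TensorFlow backend usually looks like the sketch below; this is generic TensorFlow usage, not necessarily what utils.py does, and per the TODO the torch preallocation behavior is still to be investigated:

import tensorflow as tf

def enable_tf_memory_growth() -> None:
    # ask TensorFlow to allocate GPU memory on demand instead of reserving it all upfront
    for gpu in tf.config.list_physical_devices("GPU"):
        tf.config.experimental.set_memory_growth(gpu, True)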
