src/Core.jl: 10 additions & 69 deletions
@@ -47,16 +47,16 @@ Probabilistic modeling with a tensor network.
### Fields
* `vars` are the degrees of freedom in the tensor network.
* `code` is the tensor network contraction pattern.
- * `tensors` are the tensors fed into the tensor network, the leading tensors are unity tensors associated with `mars`.
+ * `tensors` are the tensors fed into the tensor network, the leading tensors are unity tensors associated with `unity_tensors_labels`.
* `evidence` is a dictionary used to specify degrees of freedom that are fixed to certain values.
- * `mars` is a vector, each element is a vector of variables to compute marginal probabilities.
+ * `unity_tensors_idx` is a vector of indices of the unity tensors in the `tensors` array. Unity tensors are dummy tensors used to obtain the marginal probabilities.
"""
struct TensorNetworkModel{LT, ET, MT <: AbstractArray}
vars::Vector{LT}
code::ET
tensors::Vector{MT}
evidence::Dict{LT, Int}
- mars::Vector{Vector{LT}}
+ unity_tensors_idx::Vector{Int}
end
"""
@@ -110,84 +110,25 @@ $(TYPEDSIGNATURES)
* `evidence` is a dictionary of evidences; the values are integers starting from 0.
* `optimizer` is the tensor network contraction order optimizer, please check the package [`OMEinsumContractionOrders.jl`](https://github.com/TensorBFS/OMEinsumContractionOrders.jl) for available algorithms.
* `simplifier` specifies strategies for speeding up the `optimizer`; please refer to the link above.
- * `mars` is a list of marginal probabilities. It is all single variables by default, i.e. `[[1], [2], ..., [n]]`. One can also specify multi-variables, which may increase the computational complexity.
+ * `unity_tensors_labels` is a list of labels for the unity tensors. It is all single variables by default, i.e. `[[1], [2], ..., [n]]`. One can also specify multi-variables, which may increase the computational complexity.
"""
function TensorNetworkModel(
- model::UAIModel;
+ model::UAIModel{ET, FT};
openvars = (),
evidence = Dict{Int,Int}(),
optimizer = GreedyMethod(),
simplifier = nothing,
- mars = [[i] for i=1:model.nvars]
- )::TensorNetworkModel
- return TensorNetworkModel(
- 1:(model.nvars),
- model.cards,
- model.factors;
- openvars,
- evidence,
- optimizer,
- simplifier,
- mars
- )
- end
-
- """
- $(TYPEDSIGNATURES)
- """
- function TensorNetworkModel(
- vars::AbstractVector{LT},
- cards::AbstractVector{Int},
- factors::Vector{<:Factor{T}};
- openvars = (),
- evidence = Dict{LT, Int}(),
- optimizer = GreedyMethod(),
- simplifier = nothing,
- mars = [[v] for v in vars]
- )::TensorNetworkModel where {T, LT}
- # The 1st argument of `EinCode` is a vector of vector of labels for specifying the input tensors,
- # The 2nd argument of `EinCode` is a vector of labels for specifying the output tensor,
- # e.g.
- # `EinCode([[1, 2], [2, 3]], [1, 3])` is the EinCode for matrix multiplication.
- rawcode = EinCode([mars..., [[factor.vars...] for factor in factors]...], collect(LT, openvars)) # labels for vertex tensors (unity tensors) and edge tensors
- tensors = Array{T}[[ones(T, [cards[i] for i in mar]...) for mar in mars]..., [t.vals for t in factors]...]
# `optimize_code` optimizes the contraction order of a raw tensor network without a contraction order specified.
# The 1st argument is the contraction pattern to be optimized (without contraction order).
# The 2nd argument is the size dictionary, which is a label-integer dictionary.
# The 3rd and 4th arguments are the optimizer and simplifier, which configure the algorithm used to find and simplify the contraction order.
+ rawcode = EinCode([unity_tensors_labels..., [[factor.vars...] for factor in model.factors]...], collect(Int, openvars)) # labels for vertex tensors (unity tensors) and edge tensors
+ tensors = Array{ET}[[ones(ET, [model.cards[i] for i in lb]...) for lb in unity_tensors_labels]..., [t.vals for t in model.factors]...]
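To make the comments above concrete, here is a small, self-contained sketch using plain OMEinsum (no package internals; the variable names are mine) of why appending an all-ones unity tensor per variable group lets the same network produce marginals: keeping that tensor's label in the output of the `EinCode` sums the factors over every other variable.

```julia
using OMEinsum

# A single factor over two binary variables labelled 1 and 2 (a normalized joint table).
factor = [0.1 0.2;
          0.3 0.4]

# Unity (all-ones) tensor attached to variable 1, mirroring the `ones(ET, ...)` call above.
unity = ones(Float64, 2)

# Contract everything to a scalar: EinCode(input labels, output labels).
# As the removed comment notes, `EinCode([[1, 2], [2, 3]], [1, 3])` would be matrix multiplication.
full = EinCode([[1], [1, 2]], Int[])
z = full(unity, factor)[]          # partition function; 1.0 for this table

# Keeping label 1 in the output reads off the (unnormalized) marginal of variable 1.
marg = EinCode([[1], [1, 2]], [1])
p1 = marg(unity, factor) ./ z      # == vec(sum(factor, dims = 2)), i.e. [0.3, 0.7]
```

In the new constructor the unity tensors are placed first in `tensors`, so `unity_tensors_idx` is simply `1:length(unity_tensors_labels)`; the `@assert` in the fragment below checks exactly that before the network is updated.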
@assert tnet.unity_tensors_idx == collect(1:length(tnet.unity_tensors_idx)) "The target tensor network can not be updated! Got `unity_tensors_idx = $(tnet.unity_tensors_idx)`"
- @assert tn.mars[1:length(expected_mars)] == expected_mars "To get the most probable configuration, the leading elements of `tn.vars` must be `$expected_mars`"
+ ixs = OMEinsum.getixsv(tn.code)
+ unity_labels = ixs[tn.unity_tensors_idx]
+ indices = [findfirst(==([l]), unity_labels) for l in get_vars(tn)]
+ @assert !any(isnothing, indices) "To get the most probable configuration, the unity tensor labels must include all variables"