src/Core.jl (+23 -10)
@@ -47,14 +47,16 @@ Probabilistic modeling with a tensor network.
 ### Fields
 * `vars` are the degrees of freedom in the tensor network.
 * `code` is the tensor network contraction pattern.
-* `tensors` are the tensors fed into the tensor network.
+* `tensors` are the tensors fed into the tensor network; the leading tensors are unity tensors associated with `mars`.
 * `evidence` is a dictionary used to specify degrees of freedom that are fixed to certain values.
+* `mars` is a vector; each element is a vector of variables whose marginal probability is computed.
 """
 struct TensorNetworkModel{LT, ET, MT <: AbstractArray}
     vars::Vector{LT}
     code::ET
     tensors::Vector{MT}
     evidence::Dict{LT, Int}
+    mars::Vector{Vector{LT}}
 end
 
 function Base.show(io::IO, tn::TensorNetworkModel)
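To make the new field concrete, here is a minimal sketch of what `mars` and the associated leading unity tensors look like; the variable labels and cardinalities below are invented for illustration:

```julia
# Marginals requested for variable 1 alone and for the joint pair (2, 3).
mars = [[1], [2, 3]]

# Assuming all three variables are binary, the leading entries of `tensors`
# are unity tensors, one per group in `mars`, shaped by the group's cardinalities:
unity_tensors = [ones(Float64, 2), ones(Float64, 2, 2)]
```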
@@ -85,13 +87,21 @@ end
 
 """
 $(TYPEDSIGNATURES)
+
+### Keyword Arguments
+* `openvars` is the list of variables that remain in the output. If it is not empty, the return value will be a tensor of nonzero rank.
+* `evidence` is a dictionary of evidence; the values are integers counted from 0.
+* `optimizer` is the tensor network contraction order optimizer; please check the package [`OMEinsumContractionOrders.jl`](https://github.com/TensorBFS/OMEinsumContractionOrders.jl) for available algorithms.
+* `simplifier` is a strategy for speeding up the `optimizer`; please refer to the link above.
+* `mars` is a list of variable groups whose marginal probabilities are computed. By default it contains all single variables, i.e. `[[1], [2], ..., [n]]`. Multi-variable groups may also be specified, which may increase the computational complexity.
 """
 function TensorNetworkModel(
     model::UAIModel;
     openvars = (),
     evidence = Dict{Int, Int}(),
     optimizer = GreedyMethod(),
-    simplifier = nothing
+    simplifier = nothing,
+    mars = [[i] for i=1:model.nvars]
 )::TensorNetworkModel
     return TensorNetworkModel(
         1:(model.nvars),
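Assuming a `UAIModel` instance `model` is already loaded (how it is constructed is outside this diff), the new keyword could be used along these lines:

```julia
# Default: one marginal per single variable, i.e. mars = [[1], [2], ..., [n]].
tn = TensorNetworkModel(model)

# Additionally request the joint marginal of variables 2 and 3; multi-variable
# groups may increase the contraction complexity.
tn = TensorNetworkModel(model; evidence = Dict(1 => 0), mars = [[2, 3]])
```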
@@ -100,7 +110,8 @@ function TensorNetworkModel(
         openvars,
         evidence,
         optimizer,
-        simplifier
+        simplifier,
+        mars
     )
 end
 
@@ -114,15 +125,16 @@ function TensorNetworkModel(
     openvars = (),
     evidence = Dict{LT, Int}(),
     optimizer = GreedyMethod(),
-    simplifier = nothing
+    simplifier = nothing,
+    mars = [[v] for v in vars]
 )::TensorNetworkModel where {T, LT}
     # The 1st argument of `EinCode` is a vector of vectors of labels specifying the input tensors,
     # the 2nd argument of `EinCode` is a vector of labels specifying the output tensor,
     # e.g.
     # `EinCode([[1, 2], [2, 3]], [1, 3])` is the EinCode for matrix multiplication.
-    rawcode = EinCode([[[var] for var in vars]..., [[factor.vars...] for factor in factors]...], collect(LT, openvars))  # labels for vertex tensors (unity tensors) and edge tensors
-    tensors = Array{T}[[ones(T, cards[i]) for i in 1:length(vars)]..., [t.vals for t in factors]...]
+    rawcode = EinCode([mars..., [[factor.vars...] for factor in factors]...], collect(LT, openvars))  # labels for unity tensors (associated with `mars`) and factor tensors
+    tensors = Array{T}[[ones(T, [cards[i] for i in mar]...) for mar in mars]..., [t.vals for t in factors]...]
+    @assert tn.mars[1:length(expected_mars)] == expected_mars "To get the most probable configuration, the leading elements of `tn.mars` must be `$expected_mars`"
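As a sanity check on the rewritten `rawcode` and `tensors` lines above, here is a hedged walk-through on a toy model; the factor, labels, and cardinalities are invented, and `EinCode` is taken from OMEinsum.jl as in the source:

```julia
using OMEinsum  # provides `EinCode`

T     = Float64
vars  = [1, 2]                    # two binary variables
cards = Dict(1 => 2, 2 => 2)      # cardinality of each variable
mars  = [[1], [2]]                # default: one marginal per single variable
factor_vars = [1, 2]              # one pairwise factor coupling vars 1 and 2

# Labels: one entry per marginal group, then one per factor; empty output labels.
rawcode = EinCode([mars..., factor_vars], Int[])  # EinCode([[1], [2], [1, 2]], Int[])

# Tensors: unity tensors shaped by the grouped cardinalities, then factor values.
tensors = Array{T}[[ones(T, [cards[i] for i in mar]...) for mar in mars]...,
                   rand(T, 2, 2)]
```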