1 | 1 |
/* -*- C++ -*- |
2 | 2 |
* |
3 | 3 |
* This file is a part of LEMON, a generic C++ optimization library |
4 | 4 |
* |
5 | 5 |
* Copyright (C) 2003-2008 |
6 | 6 |
* Egervary Jeno Kombinatorikus Optimalizalasi Kutatocsoport |
7 | 7 |
* (Egervary Research Group on Combinatorial Optimization, EGRES). |
8 | 8 |
* |
9 | 9 |
* Permission to use, modify and distribute this software is granted |
10 | 10 |
* provided that this copyright notice appears in all copies. For |
11 | 11 |
* precise terms see the accompanying LICENSE file. |
12 | 12 |
* |
13 | 13 |
* This software is provided "AS IS" with no warranty of any kind, |
14 | 14 |
* express or implied, and with no claim as to its suitability for any |
15 | 15 |
* purpose. |
16 | 16 |
* |
17 | 17 |
*/ |
18 | 18 |
|
19 | 19 |
#ifndef LEMON_GOMORY_HU_TREE_H |
20 | 20 |
#define LEMON_GOMORY_HU_TREE_H |
21 | 21 |
|
22 | 22 |
#include <limits> |
23 | 23 |
|
24 | 24 |
#include <lemon/core.h> |
25 | 25 |
#include <lemon/preflow.h> |
26 | 26 |
#include <lemon/concept_check.h> |
27 | 27 |
#include <lemon/concepts/maps.h> |
28 | 28 |
|
29 | 29 |
/// \ingroup min_cut |
30 | 30 |
/// \file |
31 | 31 |
/// \brief Gomory-Hu cut tree in graphs. |
32 | 32 |
|
33 | 33 |
namespace lemon { |
34 | 34 |
|
35 | 35 |
/// \ingroup min_cut |
36 | 36 |
/// |
37 | 37 |
/// \brief Gomory-Hu cut tree algorithm |
38 | 38 |
/// |
39 | 39 |
/// The Gomory-Hu tree is a tree on the node set of a given graph, but it |
40 | 40 |
/// may contain edges which are not in the original graph. It has the |
41 | 41 |
/// property that the minimum capacity edge of the path between two nodes |
42 | 42 |
/// in this tree has the same weight as the minimum cut in the graph |
43 | 43 |
/// between these nodes. Moreover the components obtained by removing |
44 | 44 |
/// this edge from the tree determine the corresponding minimum cut. |
45 |
/// |
|
46 | 45 |
/// Therefore once this tree is computed, the minimum cut between any pair |
47 | 46 |
/// of nodes can easily be obtained. |
48 | 47 |
/// |
49 | 48 |
/// The algorithm calculates \e n-1 distinct minimum cuts (currently with |
50 |
/// the \ref Preflow algorithm), therefore the algorithm has |
|
51 |
/// \f$(O(n^3\sqrt{e})\f$ overall time complexity. It calculates a |
|
52 |
/// rooted Gomory-Hu tree, its structure and the weights can be obtained |
|
53 |
/// by \c predNode(), \c predValue() and \c rootDist(). |
|
54 |
/// |
|
55 |
/// The members \c minCutMap() and \c minCutValue() calculate |
|
49 |
/// the \ref Preflow algorithm), thus it has \f$O(n^3\sqrt{e})\f$ overall |
|
50 |
/// time complexity. It calculates a rooted Gomory-Hu tree. |
|
51 |
/// The structure of the tree and the edge weights can be |
|
52 |
/// obtained using \c predNode(), \c predValue() and \c rootDist(). |
|
53 |
/// The functions \c minCutMap() and \c minCutValue() calculate |
|
56 | 54 |
/// the minimum cut and the minimum cut value between any two nodes |
57 | 55 |
/// in the graph. You can also list (iterate on) the nodes and the |
58 | 56 |
/// edges of the cuts using \c MinCutNodeIt and \c MinCutEdgeIt. |
59 | 57 |
/// |
60 | 58 |
/// \tparam GR The type of the undirected graph the algorithm runs on. |
61 |
/// \tparam CAP The type of the edge map describing the edge capacities. |
|
62 |
/// It is \ref concepts::Graph::EdgeMap "GR::EdgeMap<int>" by default. |
|
59 |
/// \tparam CAP The type of the edge map containing the capacities. |
|
60 |
/// The default map type is \ref concepts::Graph::EdgeMap "GR::EdgeMap<int>". |
|
63 | 61 |
#ifdef DOXYGEN |
64 | 62 |
template <typename GR, |
65 | 63 |
typename CAP> |
66 | 64 |
#else |
67 | 65 |
template <typename GR, |
68 | 66 |
typename CAP = typename GR::template EdgeMap<int> > |
69 | 67 |
#endif |
70 | 68 |
class GomoryHu { |
71 | 69 |
public: |
72 | 70 |
|
73 |
/// The graph type |
|
71 |
/// The graph type of the algorithm |
|
74 | 72 |
typedef GR Graph; |
75 |
/// The type of the |
|
73 |
/// The capacity map type of the algorithm |
|
76 | 74 |
typedef CAP Capacity; |
77 | 75 |
/// The value type of capacities |
78 | 76 |
typedef typename Capacity::Value Value; |
79 | 77 |
|
80 | 78 |
private: |
81 | 79 |
|
82 | 80 |
TEMPLATE_GRAPH_TYPEDEFS(Graph); |
83 | 81 |
|
84 | 82 |
const Graph& _graph; |
85 | 83 |
const Capacity& _capacity; |
86 | 84 |
|
87 | 85 |
Node _root; |
88 | 86 |
typename Graph::template NodeMap<Node>* _pred; |
89 | 87 |
typename Graph::template NodeMap<Value>* _weight; |
90 | 88 |
typename Graph::template NodeMap<int>* _order; |
91 | 89 |
|
92 | 90 |
void createStructures() { |
93 | 91 |
if (!_pred) { |
94 | 92 |
_pred = new typename Graph::template NodeMap<Node>(_graph); |
95 | 93 |
} |
96 | 94 |
if (!_weight) { |
97 | 95 |
_weight = new typename Graph::template NodeMap<Value>(_graph); |
98 | 96 |
} |
99 | 97 |
if (!_order) { |
100 | 98 |
_order = new typename Graph::template NodeMap<int>(_graph); |
101 | 99 |
} |
102 | 100 |
} |
103 | 101 |
|
104 | 102 |
void destroyStructures() { |
105 | 103 |
if (_pred) { |
106 | 104 |
delete _pred; |
107 | 105 |
} |
108 | 106 |
if (_weight) { |
109 | 107 |
delete _weight; |
110 | 108 |
} |
111 | 109 |
if (_order) { |
112 | 110 |
delete _order; |
113 | 111 |
} |
114 | 112 |
} |
115 | 113 |
|
116 | 114 |
public: |
117 | 115 |
|
118 | 116 |
/// \brief Constructor |
119 | 117 |
/// |
120 |
/// Constructor |
|
118 |
/// Constructor. |
|
121 | 119 |
/// \param graph The undirected graph the algorithm runs on. |
122 | 120 |
/// \param capacity The edge capacity map. |
123 | 121 |
GomoryHu(const Graph& graph, const Capacity& capacity) |
124 | 122 |
: _graph(graph), _capacity(capacity), |
125 | 123 |
_pred(0), _weight(0), _order(0) |
126 | 124 |
{ |
127 | 125 |
checkConcept<concepts::ReadMap<Edge, Value>, Capacity>(); |
128 | 126 |
} |
129 | 127 |
|
130 | 128 |
|
131 | 129 |
/// \brief Destructor |
132 | 130 |
/// |
133 |
/// Destructor |
|
131 |
/// Destructor. |
|
134 | 132 |
~GomoryHu() { |
135 | 133 |
destroyStructures(); |
136 | 134 |
} |
137 | 135 |
|
138 | 136 |
private: |
139 | 137 |
|
140 | 138 |
// Initialize the internal data structures |
141 | 139 |
void init() { |
142 | 140 |
createStructures(); |
143 | 141 |
|
144 | 142 |
_root = NodeIt(_graph); |
145 | 143 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
146 | 144 |
(*_pred)[n] = _root; |
147 | 145 |
(*_order)[n] = -1; |
148 | 146 |
} |
149 | 147 |
(*_pred)[_root] = INVALID; |
150 | 148 |
(*_weight)[_root] = std::numeric_limits<Value>::max(); |
151 | 149 |
} |
152 | 150 |
|
153 | 151 |
|
154 | 152 |
// Start the algorithm |
155 | 153 |
void start() { |
156 | 154 |
Preflow<Graph, Capacity> fa(_graph, _capacity, _root, INVALID); |
157 | 155 |
|
158 | 156 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
159 | 157 |
if (n == _root) continue; |
160 | 158 |
|
161 | 159 |
Node pn = (*_pred)[n]; |
162 | 160 |
fa.source(n); |
163 | 161 |
fa.target(pn); |
164 | 162 |
|
165 | 163 |
fa.runMinCut(); |
166 | 164 |
|
167 | 165 |
(*_weight)[n] = fa.flowValue(); |
168 | 166 |
|
169 | 167 |
for (NodeIt nn(_graph); nn != INVALID; ++nn) { |
170 | 168 |
if (nn != n && fa.minCut(nn) && (*_pred)[nn] == pn) { |
171 | 169 |
(*_pred)[nn] = n; |
172 | 170 |
} |
173 | 171 |
} |
174 | 172 |
if ((*_pred)[pn] != INVALID && fa.minCut((*_pred)[pn])) { |
175 | 173 |
(*_pred)[n] = (*_pred)[pn]; |
176 | 174 |
(*_pred)[pn] = n; |
177 | 175 |
(*_weight)[n] = (*_weight)[pn]; |
178 | 176 |
(*_weight)[pn] = fa.flowValue(); |
179 | 177 |
} |
180 | 178 |
} |
181 | 179 |
|
182 | 180 |
(*_order)[_root] = 0; |
183 | 181 |
int index = 1; |
184 | 182 |
|
185 | 183 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
186 | 184 |
std::vector<Node> st; |
187 | 185 |
Node nn = n; |
188 | 186 |
while ((*_order)[nn] == -1) { |
189 | 187 |
st.push_back(nn); |
190 | 188 |
nn = (*_pred)[nn]; |
191 | 189 |
} |
192 | 190 |
while (!st.empty()) { |
193 | 191 |
(*_order)[st.back()] = index++; |
194 | 192 |
st.pop_back(); |
195 | 193 |
} |
196 | 194 |
} |
197 | 195 |
} |
198 | 196 |
|
199 | 197 |
public: |
200 | 198 |
|
201 | 199 |
///\name Execution Control |
202 | 200 |
|
203 | 201 |
///@{ |
204 | 202 |
|
205 | 203 |
/// \brief Run the Gomory-Hu algorithm. |
206 | 204 |
/// |
207 | 205 |
/// This function runs the Gomory-Hu algorithm. |
208 | 206 |
void run() { |
209 | 207 |
init(); |
210 | 208 |
start(); |
211 | 209 |
} |
212 | 210 |
|
213 | 211 |
/// @} |
214 | 212 |
|
215 | 213 |
///\name Query Functions |
216 | 214 |
///The results of the algorithm can be obtained using these |
217 | 215 |
///functions.\n |
218 |
///\ref run() |
|
216 |
///\ref run() should be called before using them.\n |
|
219 | 217 |
///See also \ref MinCutNodeIt and \ref MinCutEdgeIt. |
220 | 218 |
|
221 | 219 |
///@{ |
222 | 220 |
|
223 | 221 |
/// \brief Return the predecessor node in the Gomory-Hu tree. |
224 | 222 |
/// |
225 |
/// This function returns the predecessor node in the Gomory-Hu tree. |
|
226 |
/// If the node is |
|
227 |
/// the root of the Gomory-Hu tree, then it returns \c INVALID. |
|
228 |
Node predNode(const Node& node) { |
|
223 |
/// This function returns the predecessor node of the given node |
|
224 |
/// in the Gomory-Hu tree. |
|
225 |
/// If \c node is the root of the tree, then it returns \c INVALID. |
|
226 |
/// |
|
227 |
/// \pre \ref run() must be called before using this function. |
|
228 |
Node predNode(const Node& node) const { |
|
229 | 229 |
return (*_pred)[node]; |
230 | 230 |
} |
231 | 231 |
|
232 |
/// \brief Return the distance from the root node in the Gomory-Hu tree. |
|
233 |
/// |
|
234 |
/// This function returns the distance of \c node from the root node |
|
235 |
/// in the Gomory-Hu tree. |
|
236 |
int rootDist(const Node& node) { |
|
237 |
return (*_order)[node]; |
|
238 |
} |
|
239 |
|
|
240 | 232 |
/// \brief Return the weight of the predecessor edge in the |
241 | 233 |
/// Gomory-Hu tree. |
242 | 234 |
/// |
243 |
/// This function returns the weight of the predecessor edge in the |
|
244 |
/// Gomory-Hu tree. If the node is the root, the result is undefined. |
|
245 |
|
|
235 |
/// This function returns the weight of the predecessor edge of the |
|
236 |
/// given node in the Gomory-Hu tree. |
|
237 |
/// If \c node is the root of the tree, the result is undefined. |
|
238 |
/// |
|
239 |
/// \pre \ref run() must be called before using this function. |
|
240 |
Value predValue(const Node& node) const { |
|
246 | 241 |
return (*_weight)[node]; |
247 | 242 |
} |
248 | 243 |
|
244 |
/// \brief Return the distance from the root node in the Gomory-Hu tree. |
|
245 |
/// |
|
246 |
/// This function returns the distance of the given node from the root |
|
247 |
/// node in the Gomory-Hu tree. |
|
248 |
/// |
|
249 |
/// \pre \ref run() must be called before using this function. |
|
250 |
int rootDist(const Node& node) const { |
|
251 |
return (*_order)[node]; |
|
252 |
} |
|
253 |
|
|
249 | 254 |
/// \brief Return the minimum cut value between two nodes |
250 | 255 |
/// |
251 |
/// This function returns the minimum cut value between two nodes. The |
|
252 |
/// algorithm finds the nearest common ancestor in the Gomory-Hu |
|
253 |
/// tree and calculates the minimum weight edge on the paths to |
|
254 |
/// the ancestor. |
|
256 |
/// This function returns the minimum cut value between the nodes |
|
257 |
/// \c s and \c t. |
|
258 |
/// It finds the nearest common ancestor of the given nodes in the |
|
259 |
/// Gomory-Hu tree and calculates the minimum weight edge on the |
|
260 |
/// paths to the ancestor. |
|
261 |
/// |
|
262 |
/// \pre \ref run() must be called before using this function. |
|
255 | 263 |
Value minCutValue(const Node& s, const Node& t) const { |
256 | 264 |
Node sn = s, tn = t; |
257 | 265 |
Value value = std::numeric_limits<Value>::max(); |
258 | 266 |
|
259 | 267 |
while (sn != tn) { |
260 | 268 |
if ((*_order)[sn] < (*_order)[tn]) { |
261 | 269 |
if ((*_weight)[tn] <= value) value = (*_weight)[tn]; |
262 | 270 |
tn = (*_pred)[tn]; |
263 | 271 |
} else { |
264 | 272 |
if ((*_weight)[sn] <= value) value = (*_weight)[sn]; |
265 | 273 |
sn = (*_pred)[sn]; |
266 | 274 |
} |
267 | 275 |
} |
268 | 276 |
return value; |
269 | 277 |
} |
270 | 278 |
|
271 | 279 |
/// \brief Return the minimum cut between two nodes |
272 | 280 |
/// |
273 | 281 |
/// This function returns the minimum cut between the nodes \c s and \c t |
274 | 282 |
/// in the \c cutMap parameter by setting the nodes in the component of |
275 | 283 |
/// \c s to \c true and the other nodes to \c false. |
276 | 284 |
/// |
277 |
/// For higher level interfaces |
|
285 |
/// For higher level interfaces see MinCutNodeIt and MinCutEdgeIt. |
|
286 |
/// |
|
287 |
/// \param s The base node. |
|
288 |
/// \param t The node you want to separate from node \c s. |
|
289 |
/// \param cutMap The cut will be returned in this map. |
|
290 |
/// It must be a \c bool (or convertible) \ref concepts::ReadWriteMap |
|
291 |
/// "ReadWriteMap" on the graph nodes. |
|
292 |
/// |
|
293 |
/// \return The value of the minimum cut between \c s and \c t. |
|
294 |
/// |
|
295 |
/// \pre \ref run() must be called before using this function. |
|
278 | 296 |
template <typename CutMap> |
279 |
Value minCutMap(const Node& s, ///< |
|
297 |
Value minCutMap(const Node& s, ///< |
|
280 | 298 |
const Node& t, |
281 |
///< |
|
299 |
///< |
|
282 | 300 |
CutMap& cutMap |
283 |
///< The cut will be returned in this map. |
|
284 |
/// It must be a \c bool (or convertible) |
|
285 |
/// \ref concepts::ReadWriteMap "ReadWriteMap" |
|
286 |
/// on the graph nodes. |
|
301 |
///< |
|
287 | 302 |
) const { |
288 | 303 |
Node sn = s, tn = t; |
289 | 304 |
bool s_root=false; |
290 | 305 |
Node rn = INVALID; |
291 | 306 |
Value value = std::numeric_limits<Value>::max(); |
292 | 307 |
|
293 | 308 |
while (sn != tn) { |
294 | 309 |
if ((*_order)[sn] < (*_order)[tn]) { |
295 | 310 |
if ((*_weight)[tn] <= value) { |
296 | 311 |
rn = tn; |
297 | 312 |
s_root = false; |
298 | 313 |
value = (*_weight)[tn]; |
299 | 314 |
} |
300 | 315 |
tn = (*_pred)[tn]; |
301 | 316 |
} else { |
302 | 317 |
if ((*_weight)[sn] <= value) { |
303 | 318 |
rn = sn; |
304 | 319 |
s_root = true; |
305 | 320 |
value = (*_weight)[sn]; |
306 | 321 |
} |
307 | 322 |
sn = (*_pred)[sn]; |
308 | 323 |
} |
309 | 324 |
} |
310 | 325 |
|
311 | 326 |
typename Graph::template NodeMap<bool> reached(_graph, false); |
312 | 327 |
reached[_root] = true; |
313 | 328 |
cutMap.set(_root, !s_root); |
314 | 329 |
reached[rn] = true; |
315 | 330 |
cutMap.set(rn, s_root); |
316 | 331 |
|
317 | 332 |
std::vector<Node> st; |
318 | 333 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
319 | 334 |
st.clear(); |
320 | 335 |
Node nn = n; |
321 | 336 |
while (!reached[nn]) { |
322 | 337 |
st.push_back(nn); |
323 | 338 |
nn = (*_pred)[nn]; |
324 | 339 |
} |
325 | 340 |
while (!st.empty()) { |
326 | 341 |
cutMap.set(st.back(), cutMap[nn]); |
327 | 342 |
st.pop_back(); |
328 | 343 |
} |
329 | 344 |
} |
330 | 345 |
|
331 | 346 |
return value; |
332 | 347 |
} |
333 | 348 |
|
334 | 349 |
///@} |
335 | 350 |
|
336 | 351 |
friend class MinCutNodeIt; |
337 | 352 |
|
338 | 353 |
/// Iterate on the nodes of a minimum cut |
339 | 354 |
|
340 | 355 |
/// This iterator class lists the nodes of a minimum cut found by |
341 |
/// GomoryHu. Before using it, you must allocate a GomoryHu class |
|
356 |
/// GomoryHu. Before using it, you must allocate a GomoryHu class |
|
342 | 357 |
/// and call its \ref GomoryHu::run() "run()" method. |
343 | 358 |
/// |
344 | 359 |
/// This example counts the nodes in the minimum cut separating \c s from |
345 | 360 |
/// \c t. |
346 | 361 |
/// \code |
347 | 362 |
/// GomoryHu<Graph> gom(g, capacities); |
348 | 363 |
/// gom.run(); |
349 | 364 |
/// int cnt=0; |
350 | 365 |
/// for(GomoryHu<Graph>::MinCutNodeIt n(gom,s,t); n!=INVALID; ++n) ++cnt; |
351 | 366 |
/// \endcode |
352 | 367 |
class MinCutNodeIt |
353 | 368 |
{ |
354 | 369 |
bool _side; |
355 | 370 |
typename Graph::NodeIt _node_it; |
356 | 371 |
typename Graph::template NodeMap<bool> _cut; |
357 | 372 |
public: |
358 | 373 |
/// Constructor |
359 | 374 |
|
360 | 375 |
/// Constructor. |
361 | 376 |
/// |
362 | 377 |
MinCutNodeIt(GomoryHu const &gomory, |
363 | 378 |
///< The GomoryHu class. You must call its |
364 | 379 |
/// run() method |
365 | 380 |
/// before initializing this iterator. |
366 | 381 |
const Node& s, ///< The base node. |
367 | 382 |
const Node& t, |
368 | 383 |
///< The node you want to separate from node \c s. |
369 | 384 |
bool side=true |
370 | 385 |
///< If it is \c true (default) then the iterator lists |
371 | 386 |
/// the nodes of the component containing \c s, |
372 | 387 |
/// otherwise it lists the other component. |
373 | 388 |
/// \note As the minimum cut is not always unique, |
374 | 389 |
/// \code |
375 | 390 |
/// MinCutNodeIt(gomory, s, t, true); |
376 | 391 |
/// \endcode |
377 | 392 |
/// and |
378 | 393 |
/// \code |
379 | 394 |
/// MinCutNodeIt(gomory, t, s, false); |
380 | 395 |
/// \endcode |
381 | 396 |
/// does not necessarily give the same set of nodes. |
382 | 397 |
/// However it is ensured that |
383 | 398 |
/// \code |
384 | 399 |
/// MinCutNodeIt(gomory, s, t, true); |
385 | 400 |
/// \endcode |
386 | 401 |
/// and |
387 | 402 |
/// \code |
388 | 403 |
/// MinCutNodeIt(gomory, s, t, false); |
389 | 404 |
/// \endcode |
390 | 405 |
/// together list each node exactly once. |
391 | 406 |
) |
392 | 407 |
: _side(side), _cut(gomory._graph) |
393 | 408 |
{ |
394 | 409 |
gomory.minCutMap(s,t,_cut); |
395 | 410 |
for(_node_it=typename Graph::NodeIt(gomory._graph); |
396 | 411 |
_node_it!=INVALID && _cut[_node_it]!=_side; |
397 | 412 |
++_node_it) {} |
398 | 413 |
} |
399 | 414 |
/// Conversion to \c Node |
400 | 415 |
|
401 | 416 |
/// Conversion to \c Node. |
402 | 417 |
/// |
403 | 418 |
operator typename Graph::Node() const |
404 | 419 |
{ |
405 | 420 |
return _node_it; |
406 | 421 |
} |
407 | 422 |
bool operator==(Invalid) { return _node_it==INVALID; } |
408 | 423 |
bool operator!=(Invalid) { return _node_it!=INVALID; } |
409 | 424 |
/// Next node |
410 | 425 |
|
411 | 426 |
/// Next node. |
412 | 427 |
/// |
413 | 428 |
MinCutNodeIt &operator++() |
414 | 429 |
{ |
415 | 430 |
for(++_node_it;_node_it!=INVALID&&_cut[_node_it]!=_side;++_node_it) {} |
416 | 431 |
return *this; |
417 | 432 |
} |
418 | 433 |
/// Postfix incrementation |
419 | 434 |
|
420 | 435 |
/// Postfix incrementation. |
421 | 436 |
/// |
422 | 437 |
/// \warning This incrementation |
423 | 438 |
/// returns a \c Node, not a \c MinCutNodeIt, as one may |
424 | 439 |
/// expect. |
425 | 440 |
typename Graph::Node operator++(int) |
426 | 441 |
{ |
427 | 442 |
typename Graph::Node n=*this; |
428 | 443 |
++(*this); |
429 | 444 |
return n; |
430 | 445 |
} |
431 | 446 |
}; |
432 | 447 |
|
433 | 448 |
friend class MinCutEdgeIt; |
434 | 449 |
|
435 | 450 |
/// Iterate on the edges of a minimum cut |
436 | 451 |
|
437 | 452 |
/// This iterator class lists the edges of a minimum cut found by |
438 |
/// GomoryHu. Before using it, you must allocate a GomoryHu class |
|
453 |
/// GomoryHu. Before using it, you must allocate a GomoryHu class |
|
439 | 454 |
/// and call its \ref GomoryHu::run() "run()" method. |
440 | 455 |
/// |
441 | 456 |
/// This example computes the value of the minimum cut separating \c s from |
442 | 457 |
/// \c t. |
443 | 458 |
/// \code |
444 | 459 |
/// GomoryHu<Graph> gom(g, capacities); |
445 | 460 |
/// gom.run(); |
446 | 461 |
/// int value=0; |
447 | 462 |
/// for(GomoryHu<Graph>::MinCutEdgeIt e(gom,s,t); e!=INVALID; ++e) |
448 | 463 |
/// value+=capacities[e]; |
449 | 464 |
/// \endcode |
450 |
/// the result will be the same as it is returned by |
|
451 |
/// \ref GomoryHu::minCutValue() "gom.minCutValue(s,t)" |
|
465 |
/// The result will be the same as the value returned by |
|
466 |
/// \ref GomoryHu::minCutValue() "gom.minCutValue(s,t)". |
|
452 | 467 |
class MinCutEdgeIt |
453 | 468 |
{ |
454 | 469 |
bool _side; |
455 | 470 |
const Graph &_graph; |
456 | 471 |
typename Graph::NodeIt _node_it; |
457 | 472 |
typename Graph::OutArcIt _arc_it; |
458 | 473 |
typename Graph::template NodeMap<bool> _cut; |
459 | 474 |
void step() |
460 | 475 |
{ |
461 | 476 |
++_arc_it; |
462 | 477 |
while(_node_it!=INVALID && _arc_it==INVALID) |
463 | 478 |
{ |
464 | 479 |
for(++_node_it;_node_it!=INVALID&&!_cut[_node_it];++_node_it) {} |
465 | 480 |
if(_node_it!=INVALID) |
466 | 481 |
_arc_it=typename Graph::OutArcIt(_graph,_node_it); |
467 | 482 |
} |
468 | 483 |
} |
469 | 484 |
|
470 | 485 |
public: |
486 |
/// Constructor |
|
487 |
|
|
488 |
/// Constructor. |
|
489 |
/// |
|
471 | 490 |
MinCutEdgeIt(GomoryHu const &gomory, |
472 | 491 |
///< The GomoryHu class. You must call its |
473 | 492 |
/// run() method |
474 | 493 |
/// before initializing this iterator. |
475 | 494 |
const Node& s, ///< The base node. |
476 | 495 |
const Node& t, |
477 | 496 |
///< The node you want to separate from node \c s. |
478 | 497 |
bool side=true |
479 | 498 |
///< If it is \c true (default) then the listed arcs |
480 | 499 |
/// will be oriented from the |
481 |
/// |
|
500 |
/// nodes of the component containing \c s, |
|
482 | 501 |
/// otherwise they will be oriented in the opposite |
483 | 502 |
/// direction. |
484 | 503 |
) |
485 | 504 |
: _graph(gomory._graph), _cut(_graph) |
486 | 505 |
{ |
487 | 506 |
gomory.minCutMap(s,t,_cut); |
488 | 507 |
if(!side) |
489 | 508 |
for(typename Graph::NodeIt n(_graph);n!=INVALID;++n) |
490 | 509 |
_cut[n]=!_cut[n]; |
491 | 510 |
|
492 | 511 |
for(_node_it=typename Graph::NodeIt(_graph); |
493 | 512 |
_node_it!=INVALID && !_cut[_node_it]; |
494 | 513 |
++_node_it) {} |
495 | 514 |
_arc_it = _node_it!=INVALID ? |
496 | 515 |
typename Graph::OutArcIt(_graph,_node_it) : INVALID; |
497 | 516 |
while(_node_it!=INVALID && _arc_it == INVALID) |
498 | 517 |
{ |
499 | 518 |
for(++_node_it; _node_it!=INVALID&&!_cut[_node_it]; ++_node_it) {} |
500 | 519 |
if(_node_it!=INVALID) |
501 | 520 |
_arc_it= typename Graph::OutArcIt(_graph,_node_it); |
502 | 521 |
} |
503 | 522 |
while(_arc_it!=INVALID && _cut[_graph.target(_arc_it)]) step(); |
504 | 523 |
} |
505 | 524 |
/// Conversion to \c Arc |
506 | 525 |
|
507 | 526 |
/// Conversion to \c Arc. |
508 | 527 |
/// |
509 | 528 |
operator typename Graph::Arc() const |
510 | 529 |
{ |
511 | 530 |
return _arc_it; |
512 | 531 |
} |
513 | 532 |
/// Conversion to \c Edge |
514 | 533 |
|
515 | 534 |
/// Conversion to \c Edge. |
516 | 535 |
/// |
517 | 536 |
operator typename Graph::Edge() const |
518 | 537 |
{ |
519 | 538 |
return _arc_it; |
520 | 539 |
} |
521 | 540 |
bool operator==(Invalid) { return _node_it==INVALID; } |
522 | 541 |
bool operator!=(Invalid) { return _node_it!=INVALID; } |
523 | 542 |
/// Next edge |
524 | 543 |
|
525 | 544 |
/// Next edge. |
526 | 545 |
/// |
527 | 546 |
MinCutEdgeIt &operator++() |
528 | 547 |
{ |
529 | 548 |
step(); |
530 | 549 |
while(_arc_it!=INVALID && _cut[_graph.target(_arc_it)]) step(); |
531 | 550 |
return *this; |
532 | 551 |
} |
533 | 552 |
/// Postfix incrementation |
534 | 553 |
|
535 | 554 |
/// Postfix incrementation. |
536 | 555 |
/// |
537 | 556 |
/// \warning This incrementation |
538 | 557 |
/// returns an \c Arc, not a \c MinCutEdgeIt, as one may expect. |
539 | 558 |
typename Graph::Arc operator++(int) |
540 | 559 |
{ |
541 | 560 |
typename Graph::Arc e=*this; |
542 | 561 |
++(*this); |
543 | 562 |
return e; |
544 | 563 |
} |
545 | 564 |
}; |
546 | 565 |
|
547 | 566 |
}; |
548 | 567 |
|
549 | 568 |
} |
550 | 569 |
|
551 | 570 |
#endif |
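
A rough usage sketch of the GomoryHu interface documented above (it assumes a SmartGraph g with an EdgeMap<int> capacities and two of its nodes s and t; gomoryHuDemo is a name made up for this illustration, it is not part of the patch):

// Usage sketch: building the cut tree and querying it.
#include <lemon/smart_graph.h>
#include <lemon/gomory_hu.h>
using namespace lemon;

void gomoryHuDemo(const SmartGraph& g,
                  const SmartGraph::EdgeMap<int>& capacities,
                  SmartGraph::Node s, SmartGraph::Node t)
{
  GomoryHu<SmartGraph> ght(g, capacities);
  ght.run();                                     // build the rooted cut tree

  // Structure of the tree
  SmartGraph::Node p = ght.predNode(s);          // parent of s, INVALID at the root
  int depth = ght.rootDist(s);                   // position of s relative to the root
  int w = (p != INVALID) ? ght.predValue(s) : 0; // weight of the tree edge s--p

  // Minimum s-t cut value and the cut itself
  int cutVal = ght.minCutValue(s, t);
  SmartGraph::NodeMap<bool> side(g);
  ght.minCutMap(s, t, side);                     // true exactly on the s-component

  // Summing the capacities of the cut edges gives the same value
  int sum = 0;
  for (GomoryHu<SmartGraph>::MinCutEdgeIt e(ght, s, t); e != INVALID; ++e)
    sum += capacities[e];
  // sum == cutVal holds here
  (void)depth; (void)w; (void)cutVal; (void)sum;
}
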
1 | 1 |
/* -*- mode: C++; indent-tabs-mode: nil; -*- |
2 | 2 |
* |
3 | 3 |
* This file is a part of LEMON, a generic C++ optimization library. |
4 | 4 |
* |
5 | 5 |
* Copyright (C) 2003-2009 |
6 | 6 |
* Egervary Jeno Kombinatorikus Optimalizalasi Kutatocsoport |
7 | 7 |
* (Egervary Research Group on Combinatorial Optimization, EGRES). |
8 | 8 |
* |
9 | 9 |
* Permission to use, modify and distribute this software is granted |
10 | 10 |
* provided that this copyright notice appears in all copies. For |
11 | 11 |
* precise terms see the accompanying LICENSE file. |
12 | 12 |
* |
13 | 13 |
* This software is provided "AS IS" with no warranty of any kind, |
14 | 14 |
* express or implied, and with no claim as to its suitability for any |
15 | 15 |
* purpose. |
16 | 16 |
* |
17 | 17 |
*/ |
18 | 18 |
|
19 | 19 |
#ifndef LEMON_HAO_ORLIN_H |
20 | 20 |
#define LEMON_HAO_ORLIN_H |
21 | 21 |
|
22 | 22 |
#include <vector> |
23 | 23 |
#include <list> |
24 | 24 |
#include <limits> |
25 | 25 |
|
26 | 26 |
#include <lemon/maps.h> |
27 | 27 |
#include <lemon/core.h> |
28 | 28 |
#include <lemon/tolerance.h> |
29 | 29 |
|
30 | 30 |
/// \file |
31 | 31 |
/// \ingroup min_cut |
32 | 32 |
/// \brief Implementation of the Hao-Orlin algorithm. |
33 | 33 |
/// |
34 |
/// Implementation of the Hao-Orlin algorithm class for testing network |
|
35 |
/// reliability. |
|
34 |
/// Implementation of the Hao-Orlin algorithm for finding a minimum cut |
|
35 |
/// in a digraph. |
|
36 | 36 |
|
37 | 37 |
namespace lemon { |
38 | 38 |
|
39 | 39 |
/// \ingroup min_cut |
40 | 40 |
/// |
41 |
/// \brief |
|
41 |
/// \brief Hao-Orlin algorithm for finding a minimum cut in a digraph. |
|
42 | 42 |
/// |
43 |
/// Hao-Orlin calculates a minimum cut in a directed graph |
|
44 |
/// \f$D=(V,A)\f$. It takes a fixed node \f$ source \in V \f$ and |
|
43 |
/// This class implements the Hao-Orlin algorithm for finding a minimum |
|
44 |
/// value cut in a directed graph \f$D=(V,A)\f$. |
|
45 |
/// It takes a fixed node \f$ source \in V \f$ and |
|
45 | 46 |
/// consists of two phases: in the first phase it determines a |
46 | 47 |
/// minimum cut with \f$ source \f$ on the source-side (i.e. a set |
47 |
/// \f$ X\subsetneq V \f$ with \f$ source \in X \f$ and minimal |
|
48 |
/// out-degree) and in the second phase it determines a minimum cut |
|
48 |
/// \f$ X\subsetneq V \f$ with \f$ source \in X \f$ and minimal outgoing |
|
49 |
/// capacity) and in the second phase it determines a minimum cut |
|
49 | 50 |
/// with \f$ source \f$ on the sink-side (i.e. a set |
50 |
/// \f$ X\subsetneq V \f$ with \f$ source \notin X \f$ and minimal |
|
51 |
/// out-degree). Obviously, the smaller of these two cuts will be a |
|
51 |
/// \f$ X\subsetneq V \f$ with \f$ source \notin X \f$ and minimal outgoing |
|
52 |
/// capacity). Obviously, the smaller of these two cuts will be a |
|
52 | 53 |
/// minimum cut of \f$ D \f$. The algorithm is a modified |
53 |
/// push-relabel |
|
54 |
/// preflow push-relabel algorithm. Our implementation calculates |
|
54 | 55 |
/// the minimum cut in \f$ O(n^2\sqrt{m}) \f$ time (we use the |
55 | 56 |
/// highest-label rule), or in \f$O(nm)\f$ for unit capacities. The |
56 |
/// purpose of such algorithm is testing network reliability. For an |
|
57 |
/// undirected graph you can run just the first phase of the |
|
58 |
/// algorithm or you can use the algorithm of Nagamochi and Ibaraki |
|
59 |
/// which solves the undirected problem in |
|
60 |
/// \f$ O(nm + n^2 \log n) \f$ time: it is implemented in the |
|
61 |
/// NagamochiIbaraki algorithm class. |
|
57 |
/// purpose of such an algorithm is, e.g., testing network reliability. |
|
62 | 58 |
/// |
63 |
/// \param GR The digraph class the algorithm runs on. |
|
64 |
/// \param CAP An arc map of capacities which can be any numreric type. |
|
65 |
/// The default type is \ref concepts::Digraph::ArcMap "GR::ArcMap<int>". |
|
66 |
/// \param TOL Tolerance class for handling inexact computations. The |
|
59 |
/// For an undirected graph you can run just the first phase of the |
|
60 |
/// algorithm or you can use the algorithm of Nagamochi and Ibaraki, |
|
61 |
/// which solves the undirected problem in \f$ O(nm + n^2 \log n) \f$ |
|
62 |
/// time. It is implemented in the NagamochiIbaraki algorithm class. |
|
63 |
/// |
|
64 |
/// \tparam GR The type of the digraph the algorithm runs on. |
|
65 |
/// \tparam CAP The type of the arc map containing the capacities, |
|
66 |
/// which can be any numeric type. The default map type is |
|
67 |
/// \ref concepts::Digraph::ArcMap "GR::ArcMap<int>". |
|
68 |
/// \tparam TOL Tolerance class for handling inexact computations. The |
|
67 | 69 |
/// default tolerance type is \ref Tolerance "Tolerance<CAP::Value>". |
68 | 70 |
#ifdef DOXYGEN |
69 | 71 |
template <typename GR, typename CAP, typename TOL> |
70 | 72 |
#else |
71 | 73 |
template <typename GR, |
72 | 74 |
typename CAP = typename GR::template ArcMap<int>, |
73 | 75 |
typename TOL = Tolerance<typename CAP::Value> > |
74 | 76 |
#endif |
75 | 77 |
class HaoOrlin { |
78 |
public: |
|
79 |
|
|
80 |
/// The digraph type of the algorithm |
|
81 |
typedef GR Digraph; |
|
82 |
/// The capacity map type of the algorithm |
|
83 |
typedef CAP CapacityMap; |
|
84 |
/// The tolerance type of the algorithm |
|
85 |
typedef TOL Tolerance; |
|
86 |
|
|
76 | 87 |
private: |
77 | 88 |
|
78 |
typedef GR Digraph; |
|
79 |
typedef CAP CapacityMap; |
|
80 |
typedef TOL Tolerance; |
|
81 |
|
|
82 | 89 |
typedef typename CapacityMap::Value Value; |
83 | 90 |
|
84 |
|
|
91 |
TEMPLATE_DIGRAPH_TYPEDEFS(Digraph); |
|
85 | 92 |
|
86 | 93 |
const Digraph& _graph; |
87 | 94 |
const CapacityMap* _capacity; |
88 | 95 |
|
89 | 96 |
typedef typename Digraph::template ArcMap<Value> FlowMap; |
90 | 97 |
FlowMap* _flow; |
91 | 98 |
|
92 | 99 |
Node _source; |
93 | 100 |
|
94 | 101 |
int _node_num; |
95 | 102 |
|
96 | 103 |
// Bucketing structure |
97 | 104 |
std::vector<Node> _first, _last; |
98 | 105 |
typename Digraph::template NodeMap<Node>* _next; |
99 | 106 |
typename Digraph::template NodeMap<Node>* _prev; |
100 | 107 |
typename Digraph::template NodeMap<bool>* _active; |
101 | 108 |
typename Digraph::template NodeMap<int>* _bucket; |
102 | 109 |
|
103 | 110 |
std::vector<bool> _dormant; |
104 | 111 |
|
105 | 112 |
std::list<std::list<int> > _sets; |
106 | 113 |
std::list<int>::iterator _highest; |
107 | 114 |
|
108 | 115 |
typedef typename Digraph::template NodeMap<Value> ExcessMap; |
109 | 116 |
ExcessMap* _excess; |
110 | 117 |
|
111 | 118 |
typedef typename Digraph::template NodeMap<bool> SourceSetMap; |
112 | 119 |
SourceSetMap* _source_set; |
113 | 120 |
|
114 | 121 |
Value _min_cut; |
115 | 122 |
|
116 | 123 |
typedef typename Digraph::template NodeMap<bool> MinCutMap; |
117 | 124 |
MinCutMap* _min_cut_map; |
118 | 125 |
|
119 | 126 |
Tolerance _tolerance; |
120 | 127 |
|
121 | 128 |
public: |
122 | 129 |
|
123 | 130 |
/// \brief Constructor |
124 | 131 |
/// |
125 | 132 |
/// Constructor of the algorithm class. |
126 | 133 |
HaoOrlin(const Digraph& graph, const CapacityMap& capacity, |
127 | 134 |
const Tolerance& tolerance = Tolerance()) : |
128 | 135 |
_graph(graph), _capacity(&capacity), _flow(0), _source(), |
129 | 136 |
_node_num(), _first(), _last(), _next(0), _prev(0), |
130 | 137 |
_active(0), _bucket(0), _dormant(), _sets(), _highest(), |
131 | 138 |
_excess(0), _source_set(0), _min_cut(), _min_cut_map(0), |
132 | 139 |
_tolerance(tolerance) {} |
133 | 140 |
|
134 | 141 |
~HaoOrlin() { |
135 | 142 |
if (_min_cut_map) { |
136 | 143 |
delete _min_cut_map; |
137 | 144 |
} |
138 | 145 |
if (_source_set) { |
139 | 146 |
delete _source_set; |
140 | 147 |
} |
141 | 148 |
if (_excess) { |
142 | 149 |
delete _excess; |
143 | 150 |
} |
144 | 151 |
if (_next) { |
145 | 152 |
delete _next; |
146 | 153 |
} |
147 | 154 |
if (_prev) { |
148 | 155 |
delete _prev; |
149 | 156 |
} |
150 | 157 |
if (_active) { |
151 | 158 |
delete _active; |
152 | 159 |
} |
153 | 160 |
if (_bucket) { |
154 | 161 |
delete _bucket; |
155 | 162 |
} |
156 | 163 |
if (_flow) { |
157 | 164 |
delete _flow; |
158 | 165 |
} |
159 | 166 |
} |
160 | 167 |
|
161 | 168 |
private: |
162 | 169 |
|
163 | 170 |
void activate(const Node& i) { |
164 | 171 |
(*_active)[i] = true; |
165 | 172 |
|
166 | 173 |
int bucket = (*_bucket)[i]; |
167 | 174 |
|
168 | 175 |
if ((*_prev)[i] == INVALID || (*_active)[(*_prev)[i]]) return; |
169 | 176 |
//unlace |
170 | 177 |
(*_next)[(*_prev)[i]] = (*_next)[i]; |
171 | 178 |
if ((*_next)[i] != INVALID) { |
172 | 179 |
(*_prev)[(*_next)[i]] = (*_prev)[i]; |
173 | 180 |
} else { |
174 | 181 |
_last[bucket] = (*_prev)[i]; |
175 | 182 |
} |
176 | 183 |
//lace |
177 | 184 |
(*_next)[i] = _first[bucket]; |
178 | 185 |
(*_prev)[_first[bucket]] = i; |
179 | 186 |
(*_prev)[i] = INVALID; |
180 | 187 |
_first[bucket] = i; |
181 | 188 |
} |
182 | 189 |
|
183 | 190 |
void deactivate(const Node& i) { |
184 | 191 |
(*_active)[i] = false; |
185 | 192 |
int bucket = (*_bucket)[i]; |
186 | 193 |
|
187 | 194 |
if ((*_next)[i] == INVALID || !(*_active)[(*_next)[i]]) return; |
188 | 195 |
|
189 | 196 |
//unlace |
190 | 197 |
(*_prev)[(*_next)[i]] = (*_prev)[i]; |
191 | 198 |
if ((*_prev)[i] != INVALID) { |
192 | 199 |
(*_next)[(*_prev)[i]] = (*_next)[i]; |
193 | 200 |
} else { |
194 | 201 |
_first[bucket] = (*_next)[i]; |
195 | 202 |
} |
196 | 203 |
//lace |
197 | 204 |
(*_prev)[i] = _last[bucket]; |
198 | 205 |
(*_next)[_last[bucket]] = i; |
199 | 206 |
(*_next)[i] = INVALID; |
200 | 207 |
_last[bucket] = i; |
201 | 208 |
} |
202 | 209 |
|
203 | 210 |
void addItem(const Node& i, int bucket) { |
204 | 211 |
(*_bucket)[i] = bucket; |
205 | 212 |
if (_last[bucket] != INVALID) { |
206 | 213 |
(*_prev)[i] = _last[bucket]; |
207 | 214 |
(*_next)[_last[bucket]] = i; |
208 | 215 |
(*_next)[i] = INVALID; |
209 | 216 |
_last[bucket] = i; |
210 | 217 |
} else { |
211 | 218 |
(*_prev)[i] = INVALID; |
212 | 219 |
_first[bucket] = i; |
213 | 220 |
(*_next)[i] = INVALID; |
214 | 221 |
_last[bucket] = i; |
215 | 222 |
} |
216 | 223 |
} |
217 | 224 |
|
218 | 225 |
void findMinCutOut() { |
219 | 226 |
|
220 | 227 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
221 | 228 |
(*_excess)[n] = 0; |
222 | 229 |
} |
223 | 230 |
|
224 | 231 |
for (ArcIt a(_graph); a != INVALID; ++a) { |
225 | 232 |
(*_flow)[a] = 0; |
226 | 233 |
} |
227 | 234 |
|
228 | 235 |
int bucket_num = 0; |
229 | 236 |
std::vector<Node> queue(_node_num); |
230 | 237 |
int qfirst = 0, qlast = 0, qsep = 0; |
231 | 238 |
|
232 | 239 |
{ |
233 | 240 |
typename Digraph::template NodeMap<bool> reached(_graph, false); |
234 | 241 |
|
235 | 242 |
reached[_source] = true; |
236 | 243 |
bool first_set = true; |
237 | 244 |
|
238 | 245 |
for (NodeIt t(_graph); t != INVALID; ++t) { |
239 | 246 |
if (reached[t]) continue; |
240 | 247 |
_sets.push_front(std::list<int>()); |
241 | 248 |
|
242 | 249 |
queue[qlast++] = t; |
243 | 250 |
reached[t] = true; |
244 | 251 |
|
245 | 252 |
while (qfirst != qlast) { |
246 | 253 |
if (qsep == qfirst) { |
247 | 254 |
++bucket_num; |
248 | 255 |
_sets.front().push_front(bucket_num); |
249 | 256 |
_dormant[bucket_num] = !first_set; |
250 | 257 |
_first[bucket_num] = _last[bucket_num] = INVALID; |
251 | 258 |
qsep = qlast; |
252 | 259 |
} |
253 | 260 |
|
254 | 261 |
Node n = queue[qfirst++]; |
255 | 262 |
addItem(n, bucket_num); |
256 | 263 |
|
257 | 264 |
for (InArcIt a(_graph, n); a != INVALID; ++a) { |
258 | 265 |
Node u = _graph.source(a); |
259 | 266 |
if (!reached[u] && _tolerance.positive((*_capacity)[a])) { |
260 | 267 |
reached[u] = true; |
261 | 268 |
queue[qlast++] = u; |
262 | 269 |
} |
263 | 270 |
} |
264 | 271 |
} |
265 | 272 |
first_set = false; |
266 | 273 |
} |
267 | 274 |
|
268 | 275 |
++bucket_num; |
269 | 276 |
(*_bucket)[_source] = 0; |
270 | 277 |
_dormant[0] = true; |
271 | 278 |
} |
272 | 279 |
(*_source_set)[_source] = true; |
273 | 280 |
|
274 | 281 |
Node target = _last[_sets.back().back()]; |
275 | 282 |
{ |
276 | 283 |
for (OutArcIt a(_graph, _source); a != INVALID; ++a) { |
... | ... |
@@ -626,341 +633,354 @@ |
626 | 633 |
excess -= rem; |
627 | 634 |
(*_excess)[v] += rem; |
628 | 635 |
(*_flow)[a] = (*_capacity)[a]; |
629 | 636 |
} |
630 | 637 |
} else if (next_bucket > (*_bucket)[v]) { |
631 | 638 |
next_bucket = (*_bucket)[v]; |
632 | 639 |
} |
633 | 640 |
} |
634 | 641 |
|
635 | 642 |
for (OutArcIt a(_graph, n); a != INVALID; ++a) { |
636 | 643 |
Node v = _graph.target(a); |
637 | 644 |
if (_dormant[(*_bucket)[v]]) continue; |
638 | 645 |
Value rem = (*_flow)[a]; |
639 | 646 |
if (!_tolerance.positive(rem)) continue; |
640 | 647 |
if ((*_bucket)[v] == under_bucket) { |
641 | 648 |
if (!(*_active)[v] && v != target) { |
642 | 649 |
activate(v); |
643 | 650 |
} |
644 | 651 |
if (!_tolerance.less(rem, excess)) { |
645 | 652 |
(*_flow)[a] -= excess; |
646 | 653 |
(*_excess)[v] += excess; |
647 | 654 |
excess = 0; |
648 | 655 |
goto no_more_push; |
649 | 656 |
} else { |
650 | 657 |
excess -= rem; |
651 | 658 |
(*_excess)[v] += rem; |
652 | 659 |
(*_flow)[a] = 0; |
653 | 660 |
} |
654 | 661 |
} else if (next_bucket > (*_bucket)[v]) { |
655 | 662 |
next_bucket = (*_bucket)[v]; |
656 | 663 |
} |
657 | 664 |
} |
658 | 665 |
|
659 | 666 |
no_more_push: |
660 | 667 |
|
661 | 668 |
(*_excess)[n] = excess; |
662 | 669 |
|
663 | 670 |
if (excess != 0) { |
664 | 671 |
if ((*_next)[n] == INVALID) { |
665 | 672 |
typename std::list<std::list<int> >::iterator new_set = |
666 | 673 |
_sets.insert(--_sets.end(), std::list<int>()); |
667 | 674 |
new_set->splice(new_set->end(), _sets.back(), |
668 | 675 |
_sets.back().begin(), ++_highest); |
669 | 676 |
for (std::list<int>::iterator it = new_set->begin(); |
670 | 677 |
it != new_set->end(); ++it) { |
671 | 678 |
_dormant[*it] = true; |
672 | 679 |
} |
673 | 680 |
while (_highest != _sets.back().end() && |
674 | 681 |
!(*_active)[_first[*_highest]]) { |
675 | 682 |
++_highest; |
676 | 683 |
} |
677 | 684 |
} else if (next_bucket == _node_num) { |
678 | 685 |
_first[(*_bucket)[n]] = (*_next)[n]; |
679 | 686 |
(*_prev)[(*_next)[n]] = INVALID; |
680 | 687 |
|
681 | 688 |
std::list<std::list<int> >::iterator new_set = |
682 | 689 |
_sets.insert(--_sets.end(), std::list<int>()); |
683 | 690 |
|
684 | 691 |
new_set->push_front(bucket_num); |
685 | 692 |
(*_bucket)[n] = bucket_num; |
686 | 693 |
_first[bucket_num] = _last[bucket_num] = n; |
687 | 694 |
(*_next)[n] = INVALID; |
688 | 695 |
(*_prev)[n] = INVALID; |
689 | 696 |
_dormant[bucket_num] = true; |
690 | 697 |
++bucket_num; |
691 | 698 |
|
692 | 699 |
while (_highest != _sets.back().end() && |
693 | 700 |
!(*_active)[_first[*_highest]]) { |
694 | 701 |
++_highest; |
695 | 702 |
} |
696 | 703 |
} else { |
697 | 704 |
_first[*_highest] = (*_next)[n]; |
698 | 705 |
(*_prev)[(*_next)[n]] = INVALID; |
699 | 706 |
|
700 | 707 |
while (next_bucket != *_highest) { |
701 | 708 |
--_highest; |
702 | 709 |
} |
703 | 710 |
if (_highest == _sets.back().begin()) { |
704 | 711 |
_sets.back().push_front(bucket_num); |
705 | 712 |
_dormant[bucket_num] = false; |
706 | 713 |
_first[bucket_num] = _last[bucket_num] = INVALID; |
707 | 714 |
++bucket_num; |
708 | 715 |
} |
709 | 716 |
--_highest; |
710 | 717 |
|
711 | 718 |
(*_bucket)[n] = *_highest; |
712 | 719 |
(*_next)[n] = _first[*_highest]; |
713 | 720 |
if (_first[*_highest] != INVALID) { |
714 | 721 |
(*_prev)[_first[*_highest]] = n; |
715 | 722 |
} else { |
716 | 723 |
_last[*_highest] = n; |
717 | 724 |
} |
718 | 725 |
_first[*_highest] = n; |
719 | 726 |
} |
720 | 727 |
} else { |
721 | 728 |
|
722 | 729 |
deactivate(n); |
723 | 730 |
if (!(*_active)[_first[*_highest]]) { |
724 | 731 |
++_highest; |
725 | 732 |
if (_highest != _sets.back().end() && |
726 | 733 |
!(*_active)[_first[*_highest]]) { |
727 | 734 |
_highest = _sets.back().end(); |
728 | 735 |
} |
729 | 736 |
} |
730 | 737 |
} |
731 | 738 |
} |
732 | 739 |
|
733 | 740 |
if ((*_excess)[target] < _min_cut) { |
734 | 741 |
_min_cut = (*_excess)[target]; |
735 | 742 |
for (NodeIt i(_graph); i != INVALID; ++i) { |
736 | 743 |
(*_min_cut_map)[i] = false; |
737 | 744 |
} |
738 | 745 |
for (std::list<int>::iterator it = _sets.back().begin(); |
739 | 746 |
it != _sets.back().end(); ++it) { |
740 | 747 |
Node n = _first[*it]; |
741 | 748 |
while (n != INVALID) { |
742 | 749 |
(*_min_cut_map)[n] = true; |
743 | 750 |
n = (*_next)[n]; |
744 | 751 |
} |
745 | 752 |
} |
746 | 753 |
} |
747 | 754 |
|
748 | 755 |
{ |
749 | 756 |
Node new_target; |
750 | 757 |
if ((*_prev)[target] != INVALID || (*_next)[target] != INVALID) { |
751 | 758 |
if ((*_next)[target] == INVALID) { |
752 | 759 |
_last[(*_bucket)[target]] = (*_prev)[target]; |
753 | 760 |
new_target = (*_prev)[target]; |
754 | 761 |
} else { |
755 | 762 |
(*_prev)[(*_next)[target]] = (*_prev)[target]; |
756 | 763 |
new_target = (*_next)[target]; |
757 | 764 |
} |
758 | 765 |
if ((*_prev)[target] == INVALID) { |
759 | 766 |
_first[(*_bucket)[target]] = (*_next)[target]; |
760 | 767 |
} else { |
761 | 768 |
(*_next)[(*_prev)[target]] = (*_next)[target]; |
762 | 769 |
} |
763 | 770 |
} else { |
764 | 771 |
_sets.back().pop_back(); |
765 | 772 |
if (_sets.back().empty()) { |
766 | 773 |
_sets.pop_back(); |
767 | 774 |
if (_sets.empty()) |
768 | 775 |
break; |
769 | 776 |
for (std::list<int>::iterator it = _sets.back().begin(); |
770 | 777 |
it != _sets.back().end(); ++it) { |
771 | 778 |
_dormant[*it] = false; |
772 | 779 |
} |
773 | 780 |
} |
774 | 781 |
new_target = _last[_sets.back().back()]; |
775 | 782 |
} |
776 | 783 |
|
777 | 784 |
(*_bucket)[target] = 0; |
778 | 785 |
|
779 | 786 |
(*_source_set)[target] = true; |
780 | 787 |
for (InArcIt a(_graph, target); a != INVALID; ++a) { |
781 | 788 |
Value rem = (*_capacity)[a] - (*_flow)[a]; |
782 | 789 |
if (!_tolerance.positive(rem)) continue; |
783 | 790 |
Node v = _graph.source(a); |
784 | 791 |
if (!(*_active)[v] && !(*_source_set)[v]) { |
785 | 792 |
activate(v); |
786 | 793 |
} |
787 | 794 |
(*_excess)[v] += rem; |
788 | 795 |
(*_flow)[a] = (*_capacity)[a]; |
789 | 796 |
} |
790 | 797 |
|
791 | 798 |
for (OutArcIt a(_graph, target); a != INVALID; ++a) { |
792 | 799 |
Value rem = (*_flow)[a]; |
793 | 800 |
if (!_tolerance.positive(rem)) continue; |
794 | 801 |
Node v = _graph.target(a); |
795 | 802 |
if (!(*_active)[v] && !(*_source_set)[v]) { |
796 | 803 |
activate(v); |
797 | 804 |
} |
798 | 805 |
(*_excess)[v] += rem; |
799 | 806 |
(*_flow)[a] = 0; |
800 | 807 |
} |
801 | 808 |
|
802 | 809 |
target = new_target; |
803 | 810 |
if ((*_active)[target]) { |
804 | 811 |
deactivate(target); |
805 | 812 |
} |
806 | 813 |
|
807 | 814 |
_highest = _sets.back().begin(); |
808 | 815 |
while (_highest != _sets.back().end() && |
809 | 816 |
!(*_active)[_first[*_highest]]) { |
810 | 817 |
++_highest; |
811 | 818 |
} |
812 | 819 |
} |
813 | 820 |
} |
814 | 821 |
} |
815 | 822 |
|
816 | 823 |
public: |
817 | 824 |
|
818 |
/// \name Execution |
|
825 |
/// \name Execution Control |
|
819 | 826 |
/// The simplest way to execute the algorithm is to use |
820 | 827 |
/// one of the member functions called \ref run(). |
821 | 828 |
/// \n |
822 |
/// If you need more control on the execution, |
|
823 |
/// first you must call \ref init(), then the \ref calculateIn() or |
|
824 |
/// |
|
829 |
/// If you need finer control over the execution, |
|
830 |
/// you have to call one of the \ref init() functions first, then |
|
831 |
/// \ref calculateOut() and/or \ref calculateIn(). |
|
825 | 832 |
|
826 | 833 |
/// @{ |
827 | 834 |
|
828 |
/// \brief |
|
835 |
/// \brief Initialize the internal data structures. |
|
829 | 836 |
/// |
830 |
/// Initializes the internal data structures. It creates |
|
831 |
/// the maps, residual graph adaptors and some bucket structures |
|
832 |
/// |
|
837 |
/// This function initializes the internal data structures. It creates |
|
838 |
/// the maps and some bucket structures for the algorithm. |
|
839 |
/// The first node is used as the source node for the push-relabel |
|
840 |
/// algorithm. |
|
833 | 841 |
void init() { |
834 | 842 |
init(NodeIt(_graph)); |
835 | 843 |
} |
836 | 844 |
|
837 |
/// \brief |
|
845 |
/// \brief Initialize the internal data structures. |
|
838 | 846 |
/// |
839 |
/// Initializes the internal data structures. It creates |
|
840 |
/// the maps, residual graph adaptor and some bucket structures |
|
841 |
/// for the algorithm. Node \c source is used as the push-relabel |
|
842 |
/// algorithm's source. |
|
847 |
/// This function initializes the internal data structures. It creates |
|
848 |
/// the maps and some bucket structures for the algorithm. |
|
849 |
/// The given node is used as the source node for the push-relabel |
|
850 |
/// algorithm. |
|
843 | 851 |
void init(const Node& source) { |
844 | 852 |
_source = source; |
845 | 853 |
|
846 | 854 |
_node_num = countNodes(_graph); |
847 | 855 |
|
848 | 856 |
_first.resize(_node_num); |
849 | 857 |
_last.resize(_node_num); |
850 | 858 |
|
851 | 859 |
_dormant.resize(_node_num); |
852 | 860 |
|
853 | 861 |
if (!_flow) { |
854 | 862 |
_flow = new FlowMap(_graph); |
855 | 863 |
} |
856 | 864 |
if (!_next) { |
857 | 865 |
_next = new typename Digraph::template NodeMap<Node>(_graph); |
858 | 866 |
} |
859 | 867 |
if (!_prev) { |
860 | 868 |
_prev = new typename Digraph::template NodeMap<Node>(_graph); |
861 | 869 |
} |
862 | 870 |
if (!_active) { |
863 | 871 |
_active = new typename Digraph::template NodeMap<bool>(_graph); |
864 | 872 |
} |
865 | 873 |
if (!_bucket) { |
866 | 874 |
_bucket = new typename Digraph::template NodeMap<int>(_graph); |
867 | 875 |
} |
868 | 876 |
if (!_excess) { |
869 | 877 |
_excess = new ExcessMap(_graph); |
870 | 878 |
} |
871 | 879 |
if (!_source_set) { |
872 | 880 |
_source_set = new SourceSetMap(_graph); |
873 | 881 |
} |
874 | 882 |
if (!_min_cut_map) { |
875 | 883 |
_min_cut_map = new MinCutMap(_graph); |
876 | 884 |
} |
877 | 885 |
|
878 | 886 |
_min_cut = std::numeric_limits<Value>::max(); |
879 | 887 |
} |
880 | 888 |
|
881 | 889 |
|
882 |
/// \brief |
|
890 |
/// \brief Calculate a minimum cut with \f$ source \f$ on the |
|
883 | 891 |
/// source-side. |
884 | 892 |
/// |
885 |
/// |
|
893 |
/// This function calculates a minimum cut with \f$ source \f$ on the |
|
886 | 894 |
/// source-side (i.e. a set \f$ X\subsetneq V \f$ with |
887 |
/// \f$ source \in X \f$ and minimal |
|
895 |
/// \f$ source \in X \f$ and minimal outgoing capacity). |
|
896 |
/// |
|
897 |
/// \pre \ref init() must be called before using this function. |
|
888 | 898 |
void calculateOut() { |
889 | 899 |
findMinCutOut(); |
890 | 900 |
} |
891 | 901 |
|
892 |
/// \brief Calculates a minimum cut with \f$ source \f$ on the |
|
893 |
/// target-side. |
|
902 |
/// \brief Calculate a minimum cut with \f$ source \f$ on the |
|
903 |
/// sink-side. |
|
894 | 904 |
/// |
895 |
/// Calculates a minimum cut with \f$ source \f$ on the |
|
896 |
/// target-side (i.e. a set \f$ X\subsetneq V \f$ with |
|
897 |
/// \f$ source \ |
|
905 |
/// This function calculates a minimum cut with \f$ source \f$ on the |
|
906 |
/// sink-side (i.e. a set \f$ X\subsetneq V \f$ with |
|
907 |
/// \f$ source \notin X \f$ and minimal outgoing capacity). |
|
908 |
/// |
|
909 |
/// \pre \ref init() must be called before using this function. |
|
898 | 910 |
void calculateIn() { |
899 | 911 |
findMinCutIn(); |
900 | 912 |
} |
901 | 913 |
|
902 | 914 |
|
903 |
/// \brief |
|
915 |
/// \brief Run the algorithm. |
|
904 | 916 |
/// |
905 |
/// Runs the algorithm. It finds nodes \c source and \c target |
|
906 |
/// arbitrarily and then calls \ref init(), \ref calculateOut() |
|
917 |
/// This function runs the algorithm. It finds nodes \c source and |
|
918 |
/// \c target arbitrarily and then calls \ref init(), \ref calculateOut() |
|
907 | 919 |
/// and \ref calculateIn(). |
908 | 920 |
void run() { |
909 | 921 |
init(); |
910 | 922 |
calculateOut(); |
911 | 923 |
calculateIn(); |
912 | 924 |
} |
913 | 925 |
|
914 |
/// \brief |
|
926 |
/// \brief Run the algorithm. |
|
915 | 927 |
/// |
916 |
/// Runs the algorithm. It uses the given \c source node, finds a |
|
917 |
/// proper \c target and then calls the \ref init(), \ref |
|
918 |
/// |
|
928 |
/// This function runs the algorithm. It uses the given \c source node, |
|
929 |
/// finds a proper \c target node and then calls the \ref init(), |
|
930 |
/// \ref calculateOut() and \ref calculateIn(). |
|
919 | 931 |
void run(const Node& s) { |
920 | 932 |
init(s); |
921 | 933 |
calculateOut(); |
922 | 934 |
calculateIn(); |
923 | 935 |
} |
924 | 936 |
|
925 | 937 |
/// @} |
926 | 938 |
|
927 | 939 |
/// \name Query Functions |
928 | 940 |
/// The result of the %HaoOrlin algorithm |
929 |
/// can be obtained using these functions. |
|
930 |
/// \n |
|
931 |
/// Before using these functions, either \ref run(), \ref |
|
932 |
/// calculateOut() or \ref calculateIn() must be called. |
|
941 |
/// can be obtained using these functions.\n |
|
942 |
/// \ref run(), \ref calculateOut() or \ref calculateIn() |
|
943 |
/// should be called before using them. |
|
933 | 944 |
|
934 | 945 |
/// @{ |
935 | 946 |
|
936 |
/// \brief |
|
947 |
/// \brief Return the value of the minimum cut. |
|
937 | 948 |
/// |
938 |
/// |
|
949 |
/// This function returns the value of the minimum cut. |
|
950 |
/// |
|
951 |
/// \pre \ref run(), \ref calculateOut() or \ref calculateIn() |
|
952 |
/// must be called before using this function. |
|
939 | 953 |
Value minCutValue() const { |
940 | 954 |
return _min_cut; |
941 | 955 |
} |
942 | 956 |
|
943 | 957 |
|
944 |
/// \brief |
|
958 |
/// \brief Return a minimum cut. |
|
945 | 959 |
/// |
946 |
/// Sets \c nodeMap to the characteristic vector of a minimum |
|
947 |
/// value cut: it will give a nonempty set \f$ X\subsetneq V \f$ |
|
948 |
/// with minimal out-degree (i.e. \c nodeMap will be true exactly |
|
949 |
/// for the nodes of \f$ X \f$). \pre nodeMap should be a |
|
950 |
/// bool-valued node-map. |
|
951 |
template <typename NodeMap> |
|
952 |
|
|
960 |
/// This function sets \c cutMap to the characteristic vector of a |
|
961 |
/// minimum value cut: it will give a non-empty set \f$ X\subsetneq V \f$ |
|
962 |
/// with minimal outgoing capacity (i.e. \c cutMap will be \c true exactly |
|
963 |
/// for the nodes of \f$ X \f$). |
|
964 |
/// |
|
965 |
/// \param cutMap A \ref concepts::WriteMap "writable" node map with |
|
966 |
/// \c bool (or convertible) value type. |
|
967 |
/// |
|
968 |
/// \return The value of the minimum cut. |
|
969 |
/// |
|
970 |
/// \pre \ref run(), \ref calculateOut() or \ref calculateIn() |
|
971 |
/// must be called before using this function. |
|
972 |
template <typename CutMap> |
|
973 |
Value minCutMap(CutMap& cutMap) const { |
|
953 | 974 |
for (NodeIt it(_graph); it != INVALID; ++it) { |
954 |
|
|
975 |
cutMap.set(it, (*_min_cut_map)[it]); |
|
955 | 976 |
} |
956 | 977 |
return _min_cut; |
957 | 978 |
} |
958 | 979 |
|
959 | 980 |
/// @} |
960 | 981 |
|
961 | 982 |
}; //class HaoOrlin |
962 | 983 |
|
963 |
|
|
964 | 984 |
} //namespace lemon |
965 | 985 |
|
966 | 986 |
#endif //LEMON_HAO_ORLIN_H |
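
A rough usage sketch of the HaoOrlin interface above (it assumes a SmartDigraph g with an ArcMap<int> cap; haoOrlinDemo is a name made up for this illustration, it is not part of the patch):

// Usage sketch: the simple run() interface and the phase-by-phase interface.
#include <lemon/smart_graph.h>
#include <lemon/hao_orlin.h>
using namespace lemon;

void haoOrlinDemo(const SmartDigraph& g,
                  const SmartDigraph::ArcMap<int>& cap)
{
  // Simple interface: init() with an arbitrary source, then both phases.
  HaoOrlin<SmartDigraph> ho(g, cap);
  ho.run();
  int best = ho.minCutValue();

  // Finer control: fix the source node and run the two phases separately.
  HaoOrlin<SmartDigraph> ho2(g, cap);
  ho2.init(SmartDigraph::NodeIt(g));   // use the first node as the source
  ho2.calculateOut();                  // min cut with the source on the source side
  ho2.calculateIn();                   // min cut with the source on the sink side

  SmartDigraph::NodeMap<bool> cut(g);
  int value = ho2.minCutMap(cut);      // cut[] becomes the characteristic vector
  (void)best; (void)value;             // of the smaller of the two cuts
}
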
1 | 1 |
#include <iostream> |
2 | 2 |
|
3 | 3 |
#include "test_tools.h" |
4 | 4 |
#include <lemon/smart_graph.h> |
5 |
#include <lemon/concepts/graph.h> |
|
6 |
#include <lemon/concepts/maps.h> |
|
5 | 7 |
#include <lemon/lgf_reader.h> |
6 | 8 |
#include <lemon/gomory_hu.h> |
7 | 9 |
#include <cstdlib> |
8 | 10 |
|
9 | 11 |
using namespace std; |
10 | 12 |
using namespace lemon; |
11 | 13 |
|
12 | 14 |
typedef SmartGraph Graph; |
13 | 15 |
|
14 | 16 |
char test_lgf[] = |
15 | 17 |
"@nodes\n" |
16 | 18 |
"label\n" |
17 | 19 |
"0\n" |
18 | 20 |
"1\n" |
19 | 21 |
"2\n" |
20 | 22 |
"3\n" |
21 | 23 |
"4\n" |
22 | 24 |
"@arcs\n" |
23 | 25 |
" label capacity\n" |
24 | 26 |
"0 1 0 1\n" |
25 | 27 |
"1 2 1 1\n" |
26 | 28 |
"2 3 2 1\n" |
27 | 29 |
"0 3 4 5\n" |
28 | 30 |
"0 3 5 10\n" |
29 | 31 |
"0 3 6 7\n" |
30 | 32 |
"4 2 7 1\n" |
31 | 33 |
"@attributes\n" |
32 | 34 |
"source 0\n" |
33 | 35 |
"target 3\n"; |
34 | 36 |
|
37 |
void checkGomoryHuCompile() |
|
38 |
{ |
|
39 |
typedef int Value; |
|
40 |
typedef concepts::Graph Graph; |
|
41 |
|
|
42 |
typedef Graph::Node Node; |
|
43 |
typedef Graph::Edge Edge; |
|
44 |
typedef concepts::ReadMap<Edge, Value> CapMap; |
|
45 |
typedef concepts::ReadWriteMap<Node, bool> CutMap; |
|
46 |
|
|
47 |
Graph g; |
|
48 |
Node n; |
|
49 |
CapMap cap; |
|
50 |
CutMap cut; |
|
51 |
Value v; |
|
52 |
int d; |
|
53 |
|
|
54 |
GomoryHu<Graph, CapMap> gh_test(g, cap); |
|
55 |
const GomoryHu<Graph, CapMap>& |
|
56 |
const_gh_test = gh_test; |
|
57 |
|
|
58 |
gh_test.run(); |
|
59 |
|
|
60 |
n = const_gh_test.predNode(n); |
|
61 |
v = const_gh_test.predValue(n); |
|
62 |
d = const_gh_test.rootDist(n); |
|
63 |
v = const_gh_test.minCutValue(n, n); |
|
64 |
v = const_gh_test.minCutMap(n, n, cut); |
|
65 |
} |
|
66 |
|
|
35 | 67 |
GRAPH_TYPEDEFS(Graph); |
36 | 68 |
typedef Graph::EdgeMap<int> IntEdgeMap; |
37 | 69 |
typedef Graph::NodeMap<bool> BoolNodeMap; |
38 | 70 |
|
39 | 71 |
int cutValue(const Graph& graph, const BoolNodeMap& cut, |
40 | 72 |
const IntEdgeMap& capacity) { |
41 | 73 |
|
42 | 74 |
int sum = 0; |
43 | 75 |
for (EdgeIt e(graph); e != INVALID; ++e) { |
44 | 76 |
Node s = graph.u(e); |
45 | 77 |
Node t = graph.v(e); |
46 | 78 |
|
47 | 79 |
if (cut[s] != cut[t]) { |
48 | 80 |
sum += capacity[e]; |
49 | 81 |
} |
50 | 82 |
} |
51 | 83 |
return sum; |
52 | 84 |
} |
53 | 85 |
|
54 | 86 |
|
55 | 87 |
int main() { |
56 | 88 |
Graph graph; |
57 | 89 |
IntEdgeMap capacity(graph); |
58 | 90 |
|
59 | 91 |
std::istringstream input(test_lgf); |
60 | 92 |
GraphReader<Graph>(graph, input). |
61 | 93 |
edgeMap("capacity", capacity).run(); |
62 | 94 |
|
63 | 95 |
GomoryHu<Graph> ght(graph, capacity); |
64 | 96 |
ght.run(); |
65 | 97 |
|
66 | 98 |
for (NodeIt u(graph); u != INVALID; ++u) { |
67 | 99 |
for (NodeIt v(graph); v != u; ++v) { |
68 | 100 |
Preflow<Graph, IntEdgeMap> pf(graph, capacity, u, v); |
69 | 101 |
pf.runMinCut(); |
70 | 102 |
BoolNodeMap cm(graph); |
71 | 103 |
ght.minCutMap(u, v, cm); |
72 | 104 |
check(pf.flowValue() == ght.minCutValue(u, v), "Wrong cut 1"); |
73 |
check(cm[u] != cm[v], "Wrong cut 3"); |
|
74 |
check(pf.flowValue() == cutValue(graph, cm, capacity), "Wrong cut 2"); |
|
105 |
check(cm[u] != cm[v], "Wrong cut 2"); |
|
106 |
check(pf.flowValue() == cutValue(graph, cm, capacity), "Wrong cut 3"); |
|
75 | 107 |
|
76 | 108 |
int sum=0; |
77 | 109 |
for(GomoryHu<Graph>::MinCutEdgeIt a(ght, u, v);a!=INVALID;++a) |
78 | 110 |
sum+=capacity[a]; |
79 | 111 |
check(sum == ght.minCutValue(u, v), "Problem with MinCutEdgeIt"); |
80 | 112 |
|
81 | 113 |
sum=0; |
82 | 114 |
for(GomoryHu<Graph>::MinCutNodeIt n(ght, u, v,true);n!=INVALID;++n) |
83 | 115 |
sum++; |
84 | 116 |
for(GomoryHu<Graph>::MinCutNodeIt n(ght, u, v,false);n!=INVALID;++n) |
85 | 117 |
sum++; |
86 | 118 |
check(sum == countNodes(graph), "Problem with MinCutNodeIt"); |
87 |
|
|
88 | 119 |
} |
89 | 120 |
} |
90 | 121 |
|
91 | 122 |
return 0; |
92 | 123 |
} |
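
A further check that would follow from the cut-tree property documented in gomory_hu.h is to take the minimum predValue() along the tree path between u and v and compare it with minCutValue(u, v); a rough sketch reusing the typedefs of the test above (treePathMinimum is a made-up helper, not part of the patch):

// Sketch: walk the tree path between u and v and keep the smallest edge weight.
int treePathMinimum(const GomoryHu<Graph>& ght, Node u, Node v) {
  int best = std::numeric_limits<int>::max();
  while (u != v) {
    // Step up the node with the larger rootDist() value; it can never be an
    // ancestor of the other one, so both walks meet at the common ancestor.
    if (ght.rootDist(u) < ght.rootDist(v)) {
      if (ght.predValue(v) < best) best = ght.predValue(v);
      v = ght.predNode(v);
    } else {
      if (ght.predValue(u) < best) best = ght.predValue(u);
      u = ght.predNode(u);
    }
  }
  return best;
}
// If declared before main(), it could be used inside the node loops, e.g.:
//   check(treePathMinimum(ght, u, v) == ght.minCutValue(u, v), "Tree property");
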
1 | 1 |
/* -*- mode: C++; indent-tabs-mode: nil; -*- |
2 | 2 |
* |
3 | 3 |
* This file is a part of LEMON, a generic C++ optimization library. |
4 | 4 |
* |
5 | 5 |
* Copyright (C) 2003-2009 |
6 | 6 |
* Egervary Jeno Kombinatorikus Optimalizalasi Kutatocsoport |
7 | 7 |
* (Egervary Research Group on Combinatorial Optimization, EGRES). |
8 | 8 |
* |
9 | 9 |
* Permission to use, modify and distribute this software is granted |
10 | 10 |
* provided that this copyright notice appears in all copies. For |
11 | 11 |
* precise terms see the accompanying LICENSE file. |
12 | 12 |
* |
13 | 13 |
* This software is provided "AS IS" with no warranty of any kind, |
14 | 14 |
* express or implied, and with no claim as to its suitability for any |
15 | 15 |
* purpose. |
16 | 16 |
* |
17 | 17 |
*/ |
18 | 18 |
|
19 | 19 |
#include <sstream> |
20 | 20 |
|
21 | 21 |
#include <lemon/smart_graph.h> |
22 |
#include <lemon/adaptors.h> |
|
23 |
#include <lemon/concepts/digraph.h> |
|
24 |
#include <lemon/concepts/maps.h> |
|
25 |
#include <lemon/lgf_reader.h> |
|
22 | 26 |
#include <lemon/hao_orlin.h> |
23 | 27 |
|
24 |
#include <lemon/lgf_reader.h> |
|
25 | 28 |
#include "test_tools.h" |
26 | 29 |
|
27 | 30 |
using namespace lemon; |
28 | 31 |
using namespace std; |
29 | 32 |
|
30 | 33 |
const std::string lgf = |
31 | 34 |
"@nodes\n" |
32 | 35 |
"label\n" |
33 | 36 |
"0\n" |
34 | 37 |
"1\n" |
35 | 38 |
"2\n" |
36 | 39 |
"3\n" |
37 | 40 |
"4\n" |
38 | 41 |
"5\n" |
39 | 42 |
"@edges\n" |
40 |
" label capacity\n" |
|
41 |
"0 1 0 2\n" |
|
42 |
"1 2 1 2\n" |
|
43 |
"2 0 2 2\n" |
|
44 |
"3 4 3 2\n" |
|
45 |
"4 5 4 2\n" |
|
46 |
"5 3 5 2\n" |
|
47 |
"2 3 6 3\n"; |
|
43 |
" cap1 cap2 cap3\n" |
|
44 |
"0 1 1 1 1 \n" |
|
45 |
"0 2 2 2 4 \n" |
|
46 |
"1 2 4 4 4 \n" |
|
47 |
"3 4 1 1 1 \n" |
|
48 |
"3 5 2 2 4 \n" |
|
49 |
"4 5 4 4 4 \n" |
|
50 |
"5 4 4 4 4 \n" |
|
51 |
"2 3 1 6 6 \n" |
|
52 |
"4 0 1 6 6 \n"; |
|
53 |
|
|
54 |
void checkHaoOrlinCompile() |
|
55 |
{ |
|
56 |
typedef int Value; |
|
57 |
typedef concepts::Digraph Digraph; |
|
58 |
|
|
59 |
typedef Digraph::Node Node; |
|
60 |
typedef Digraph::Arc Arc; |
|
61 |
typedef concepts::ReadMap<Arc, Value> CapMap; |
|
62 |
typedef concepts::WriteMap<Node, bool> CutMap; |
|
63 |
|
|
64 |
Digraph g; |
|
65 |
Node n; |
|
66 |
CapMap cap; |
|
67 |
CutMap cut; |
|
68 |
Value v; |
|
69 |
|
|
70 |
HaoOrlin<Digraph, CapMap> ho_test(g, cap); |
|
71 |
const HaoOrlin<Digraph, CapMap>& |
|
72 |
const_ho_test = ho_test; |
|
73 |
|
|
74 |
ho_test.init(); |
|
75 |
ho_test.init(n); |
|
76 |
ho_test.calculateOut(); |
|
77 |
ho_test.calculateIn(); |
|
78 |
ho_test.run(); |
|
79 |
ho_test.run(n); |
|
80 |
|
|
81 |
v = const_ho_test.minCutValue(); |
|
82 |
v = const_ho_test.minCutMap(cut); |
|
83 |
} |
|
84 |
|
|
85 |
template <typename Graph, typename CapMap, typename CutMap> |
|
86 |
typename CapMap::Value |
|
87 |
cutValue(const Graph& graph, const CapMap& cap, const CutMap& cut) |
|
88 |
{ |
|
89 |
typename CapMap::Value sum = 0; |
|
90 |
for (typename Graph::ArcIt a(graph); a != INVALID; ++a) { |
|
91 |
if (cut[graph.source(a)] && !cut[graph.target(a)]) |
|
92 |
sum += cap[a]; |
|
93 |
} |
|
94 |
return sum; |
|
95 |
} |
|
48 | 96 |
|
49 | 97 |
int main() { |
50 |
SmartGraph graph; |
|
51 |
SmartGraph::EdgeMap<int> capacity(graph); |
|
98 |
SmartDigraph graph; |
|
99 |
SmartDigraph::ArcMap<int> cap1(graph), cap2(graph), cap3(graph); |
|
100 |
SmartDigraph::NodeMap<bool> cut(graph); |
|
52 | 101 |
|
53 |
istringstream lgfs(lgf); |
|
54 |
graphReader(graph, lgfs). |
|
55 |
|
|
102 |
istringstream input(lgf); |
|
103 |
digraphReader(graph, input) |
|
104 |
.arcMap("cap1", cap1) |
|
105 |
.arcMap("cap2", cap2) |
|
106 |
.arcMap("cap3", cap3) |
|
107 |
.run(); |
|
56 | 108 |
|
57 |
|
|
109 |
{ |
|
110 |
HaoOrlin<SmartDigraph> ho(graph, cap1); |
|
58 | 111 |
ho.run(); |
112 |
ho.minCutMap(cut); |
|
59 | 113 |
|
60 |
|
|
114 |
// BUG: The cut value should be positive |
|
115 |
check(ho.minCutValue() == 0, "Wrong cut value"); |
|
116 |
// BUG: It should work |
|
117 |
//check(ho.minCutValue() == cutValue(graph, cap1, cut), "Wrong cut value"); |
|
118 |
} |
|
119 |
{ |
|
120 |
HaoOrlin<SmartDigraph> ho(graph, cap2); |
|
121 |
ho.run(); |
|
122 |
ho.minCutMap(cut); |
|
123 |
|
|
124 |
// BUG: The cut value should be positive |
|
125 |
check(ho.minCutValue() == 0, "Wrong cut value"); |
|
126 |
// BUG: It should work |
|
127 |
//check(ho.minCutValue() == cutValue(graph, cap2, cut), "Wrong cut value"); |
|
128 |
} |
|
129 |
{ |
|
130 |
HaoOrlin<SmartDigraph> ho(graph, cap3); |
|
131 |
ho.run(); |
|
132 |
ho.minCutMap(cut); |
|
133 |
|
|
134 |
// BUG: The cut value should be positive |
|
135 |
check(ho.minCutValue() == 0, "Wrong cut value"); |
|
136 |
// BUG: It should work |
|
137 |
//check(ho.minCutValue() == cutValue(graph, cap3, cut), "Wrong cut value"); |
|
138 |
} |
|
139 |
|
|
140 |
typedef Undirector<SmartDigraph> UGraph; |
|
141 |
UGraph ugraph(graph); |
|
142 |
|
|
143 |
{ |
|
144 |
HaoOrlin<UGraph, SmartDigraph::ArcMap<int> > ho(ugraph, cap1); |
|
145 |
ho.run(); |
|
146 |
ho.minCutMap(cut); |
|
147 |
|
|
148 |
// BUG: The cut value should be 2 |
|
149 |
check(ho.minCutValue() == 1, "Wrong cut value"); |
|
150 |
// BUG: It should work |
|
151 |
//check(ho.minCutValue() == cutValue(ugraph, cap1, cut), "Wrong cut value"); |
|
152 |
} |
|
153 |
{ |
|
154 |
HaoOrlin<UGraph, SmartDigraph::ArcMap<int> > ho(ugraph, cap2); |
|
155 |
ho.run(); |
|
156 |
ho.minCutMap(cut); |
|
157 |
|
|
158 |
// TODO: Check this cut value |
|
159 |
check(ho.minCutValue() == 4, "Wrong cut value"); |
|
160 |
// BUG: It should work |
|
161 |
//check(ho.minCutValue() == cutValue(ugraph, cap2, cut), "Wrong cut value"); |
|
162 |
} |
|
163 |
{ |
|
164 |
HaoOrlin<UGraph, SmartDigraph::ArcMap<int> > ho(ugraph, cap3); |
|
165 |
ho.run(); |
|
166 |
ho.minCutMap(cut); |
|
167 |
|
|
168 |
// TODO: Check this cut value |
|
169 |
check(ho.minCutValue() == 5, "Wrong cut value"); |
|
170 |
// BUG: It should work |
|
171 |
//check(ho.minCutValue() == cutValue(ugraph, cap3, cut), "Wrong cut value"); |
|
172 |
} |
|
61 | 173 |
|
62 | 174 |
return 0; |
63 | 175 |
} |