1 | 1 |
/* -*- mode: C++; indent-tabs-mode: nil; -*- |
2 | 2 |
* |
3 | 3 |
* This file is a part of LEMON, a generic C++ optimization library. |
4 | 4 |
* |
5 | 5 |
* Copyright (C) 2003-2010 |
6 | 6 |
* Egervary Jeno Kombinatorikus Optimalizalasi Kutatocsoport |
7 | 7 |
* (Egervary Research Group on Combinatorial Optimization, EGRES). |
8 | 8 |
* |
9 | 9 |
* Permission to use, modify and distribute this software is granted |
10 | 10 |
* provided that this copyright notice appears in all copies. For |
11 | 11 |
* precise terms see the accompanying LICENSE file. |
12 | 12 |
* |
13 | 13 |
* This software is provided "AS IS" with no warranty of any kind, |
14 | 14 |
* express or implied, and with no claim as to its suitability for any |
15 | 15 |
* purpose. |
16 | 16 |
* |
17 | 17 |
*/ |
18 | 18 |
|
19 | 19 |
#ifndef LEMON_HAO_ORLIN_H |
20 | 20 |
#define LEMON_HAO_ORLIN_H |
21 | 21 |
|
22 | 22 |
#include <vector> |
23 | 23 |
#include <list> |
24 | 24 |
#include <limits> |
25 | 25 |
|
26 | 26 |
#include <lemon/maps.h> |
27 | 27 |
#include <lemon/core.h> |
28 | 28 |
#include <lemon/tolerance.h> |
29 | 29 |
|
30 | 30 |
/// \file |
31 | 31 |
/// \ingroup min_cut |
32 | 32 |
/// \brief Implementation of the Hao-Orlin algorithm. |
33 | 33 |
/// |
34 | 34 |
/// Implementation of the Hao-Orlin algorithm for finding a minimum cut |
35 | 35 |
/// in a digraph. |
36 | 36 |
|
37 | 37 |
namespace lemon { |
38 | 38 |
|
39 | 39 |
/// \ingroup min_cut |
40 | 40 |
/// |
41 | 41 |
/// \brief Hao-Orlin algorithm for finding a minimum cut in a digraph. |
42 | 42 |
/// |
43 | 43 |
/// This class implements the Hao-Orlin algorithm for finding a minimum |
44 | 44 |
/// value cut in a directed graph \f$D=(V,A)\f$. |
45 | 45 |
/// It takes a fixed node \f$ source \in V \f$ and |
46 | 46 |
/// consists of two phases: in the first phase it determines a |
47 | 47 |
/// minimum cut with \f$ source \f$ on the source-side (i.e. a set |
48 | 48 |
/// \f$ X\subsetneq V \f$ with \f$ source \in X \f$ and minimal outgoing |
49 | 49 |
/// capacity) and in the second phase it determines a minimum cut |
50 | 50 |
/// with \f$ source \f$ on the sink-side (i.e. a set |
51 | 51 |
/// \f$ X\subsetneq V \f$ with \f$ source \notin X \f$ and minimal outgoing |
52 | 52 |
/// capacity). Obviously, the smaller of these two cuts will be a |
53 | 53 |
/// minimum cut of \f$ D \f$. The algorithm is a modified |
54 | 54 |
/// preflow push-relabel algorithm. Our implementation calculates |
55 | 55 |
/// the minimum cut in \f$ O(n^2\sqrt{m}) \f$ time (we use the |
/// highest-label rule), or in \f$O(nm)\f$ for unit capacities. A notable
/// use of this algorithm is testing network reliability.
|
58 | 58 |
/// |
59 | 59 |
/// For an undirected graph you can run just the first phase of the |
60 | 60 |
/// algorithm or you can use the algorithm of Nagamochi and Ibaraki, |
61 | 61 |
/// which solves the undirected problem in \f$ O(nm + n^2 \log n) \f$ |
62 | 62 |
/// time. It is implemented in the NagamochiIbaraki algorithm class. |
63 | 63 |
/// |
64 | 64 |
/// \tparam GR The type of the digraph the algorithm runs on. |
65 | 65 |
/// \tparam CAP The type of the arc map containing the capacities, |
66 | 66 |
/// which can be any numeric type. The default map type is
67 | 67 |
/// \ref concepts::Digraph::ArcMap "GR::ArcMap<int>". |
68 | 68 |
/// \tparam TOL Tolerance class for handling inexact computations. The |
69 | 69 |
/// default tolerance type is \ref Tolerance "Tolerance<CAP::Value>". |
70 | 70 |
#ifdef DOXYGEN |
71 | 71 |
template <typename GR, typename CAP, typename TOL> |
72 | 72 |
#else |
73 | 73 |
template <typename GR, |
74 | 74 |
typename CAP = typename GR::template ArcMap<int>, |
75 | 75 |
typename TOL = Tolerance<typename CAP::Value> > |
76 | 76 |
#endif |
77 | 77 |
class HaoOrlin { |
78 | 78 |
public: |
79 | 79 |
|
80 | 80 |
/// The digraph type of the algorithm |
81 | 81 |
typedef GR Digraph; |
82 | 82 |
/// The capacity map type of the algorithm |
83 | 83 |
typedef CAP CapacityMap; |
84 | 84 |
/// The tolerance type of the algorithm |
85 | 85 |
typedef TOL Tolerance; |
86 | 86 |
|
87 | 87 |
private: |
88 | 88 |
|
89 | 89 |
typedef typename CapacityMap::Value Value; |
90 | 90 |
|
91 | 91 |
TEMPLATE_DIGRAPH_TYPEDEFS(Digraph); |
92 | 92 |
|
93 | 93 |
const Digraph& _graph; |
94 | 94 |
const CapacityMap* _capacity; |
95 | 95 |
|
96 | 96 |
typedef typename Digraph::template ArcMap<Value> FlowMap; |
97 | 97 |
FlowMap* _flow; |
98 | 98 |
|
99 | 99 |
Node _source; |
100 | 100 |
|
101 | 101 |
int _node_num; |
102 | 102 |
|
103 | 103 |
// Bucketing structure |
104 | 104 |
std::vector<Node> _first, _last; |
105 | 105 |
typename Digraph::template NodeMap<Node>* _next; |
106 | 106 |
typename Digraph::template NodeMap<Node>* _prev; |
107 | 107 |
typename Digraph::template NodeMap<bool>* _active; |
108 | 108 |
typename Digraph::template NodeMap<int>* _bucket; |
109 | 109 |
|
110 | 110 |
std::vector<bool> _dormant; |
111 | 111 |
|
112 | 112 |
std::list<std::list<int> > _sets; |
113 | 113 |
std::list<int>::iterator _highest; |
114 | 114 |
|
115 | 115 |
typedef typename Digraph::template NodeMap<Value> ExcessMap; |
116 | 116 |
ExcessMap* _excess; |
117 | 117 |
|
118 | 118 |
typedef typename Digraph::template NodeMap<bool> SourceSetMap; |
119 | 119 |
SourceSetMap* _source_set; |
120 | 120 |
|
121 | 121 |
Value _min_cut; |
122 | 122 |
|
123 | 123 |
typedef typename Digraph::template NodeMap<bool> MinCutMap; |
124 | 124 |
MinCutMap* _min_cut_map; |
125 | 125 |
|
126 | 126 |
Tolerance _tolerance; |
127 | 127 |
|
128 | 128 |
public: |
129 | 129 |
|
130 | 130 |
    /// \brief Constructor
    ///
    /// Constructor of the algorithm class.
    ///
    /// \param graph The digraph the algorithm runs on.
    /// \param capacity The arc map containing the capacities.
    /// \param tolerance Tolerance object used for handling inexact
    /// computations.
    ///
    /// Only pointers/references are stored; the maps are allocated
    /// lazily by \ref init().
    HaoOrlin(const Digraph& graph, const CapacityMap& capacity,
             const Tolerance& tolerance = Tolerance()) :
      _graph(graph), _capacity(&capacity), _flow(0), _source(),
      _node_num(), _first(), _last(), _next(0), _prev(0),
      _active(0), _bucket(0), _dormant(), _sets(), _highest(),
      _excess(0), _source_set(0), _min_cut(), _min_cut_map(0),
      _tolerance(tolerance) {}
140 | 140 |
|
141 | 141 |
~HaoOrlin() { |
142 | 142 |
if (_min_cut_map) { |
143 | 143 |
delete _min_cut_map; |
144 | 144 |
} |
145 | 145 |
if (_source_set) { |
146 | 146 |
delete _source_set; |
147 | 147 |
} |
148 | 148 |
if (_excess) { |
149 | 149 |
delete _excess; |
150 | 150 |
} |
151 | 151 |
if (_next) { |
152 | 152 |
delete _next; |
153 | 153 |
} |
154 | 154 |
if (_prev) { |
155 | 155 |
delete _prev; |
156 | 156 |
} |
157 | 157 |
if (_active) { |
158 | 158 |
delete _active; |
159 | 159 |
} |
160 | 160 |
if (_bucket) { |
161 | 161 |
delete _bucket; |
162 | 162 |
} |
163 | 163 |
if (_flow) { |
164 | 164 |
delete _flow; |
165 | 165 |
} |
166 | 166 |
} |
167 | 167 |
|
168 | 168 |
    /// \brief Set the tolerance used by the algorithm.
    ///
    /// This function sets the tolerance object used by the algorithm.
    /// \param tolerance The new tolerance object.
    /// \return <tt>(*this)</tt>
    HaoOrlin& tolerance(const Tolerance& tolerance) {
      _tolerance = tolerance;
      return *this;
    }
176 | 176 |
|
177 | 177 |
    /// \brief Returns a const reference to the tolerance.
    ///
    /// This function returns a const reference to the tolerance object
    /// used by the algorithm.
    /// \return The tolerance object currently in use.
    const Tolerance& tolerance() const {
      return _tolerance;
    }
184 | 184 |
|
185 | 185 |
private: |
186 | 186 |
|
187 | 187 |
    // Mark node i as active and move it to the front of its bucket's
    // intrusive list, maintaining the invariant that active nodes
    // precede inactive ones within a bucket.
    void activate(const Node& i) {
      (*_active)[i] = true;

      int bucket = (*_bucket)[i];

      // Already at the front, or preceded by another active node:
      // the ordering invariant already holds, nothing to move.
      if ((*_prev)[i] == INVALID || (*_active)[(*_prev)[i]]) return;
      //unlace
      (*_next)[(*_prev)[i]] = (*_next)[i];
      if ((*_next)[i] != INVALID) {
        (*_prev)[(*_next)[i]] = (*_prev)[i];
      } else {
        // i was the last element; its predecessor becomes the new tail
        _last[bucket] = (*_prev)[i];
      }
      //lace (push to the front of the bucket)
      (*_next)[i] = _first[bucket];
      (*_prev)[_first[bucket]] = i;
      (*_prev)[i] = INVALID;
      _first[bucket] = i;
    }
206 | 206 |
|
207 | 207 |
    // Mark node i as inactive and move it to the back of its bucket's
    // intrusive list, so active nodes stay in front of inactive ones.
    void deactivate(const Node& i) {
      (*_active)[i] = false;
      int bucket = (*_bucket)[i];

      // Already at the back, or followed by an inactive node:
      // the ordering invariant already holds, nothing to move.
      if ((*_next)[i] == INVALID || !(*_active)[(*_next)[i]]) return;

      //unlace
      (*_prev)[(*_next)[i]] = (*_prev)[i];
      if ((*_prev)[i] != INVALID) {
        (*_next)[(*_prev)[i]] = (*_next)[i];
      } else {
        // i was the first element; its successor becomes the new head
        _first[bucket] = (*_next)[i];
      }
      //lace (append to the back of the bucket)
      (*_prev)[i] = _last[bucket];
      (*_next)[_last[bucket]] = i;
      (*_next)[i] = INVALID;
      _last[bucket] = i;
    }
226 | 226 |
|
227 | 227 |
void addItem(const Node& i, int bucket) { |
228 | 228 |
(*_bucket)[i] = bucket; |
229 | 229 |
if (_last[bucket] != INVALID) { |
230 | 230 |
(*_prev)[i] = _last[bucket]; |
231 | 231 |
(*_next)[_last[bucket]] = i; |
232 | 232 |
(*_next)[i] = INVALID; |
233 | 233 |
_last[bucket] = i; |
234 | 234 |
} else { |
235 | 235 |
(*_prev)[i] = INVALID; |
236 | 236 |
_first[bucket] = i; |
237 | 237 |
(*_next)[i] = INVALID; |
238 | 238 |
_last[bucket] = i; |
239 | 239 |
} |
240 | 240 |
} |
241 | 241 |
|
242 | 242 |
void findMinCutOut() { |
243 | 243 |
|
244 | 244 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
245 | 245 |
(*_excess)[n] = 0; |
246 | 246 |
(*_source_set)[n] = false; |
247 | 247 |
} |
248 | 248 |
|
249 | 249 |
for (ArcIt a(_graph); a != INVALID; ++a) { |
250 | 250 |
(*_flow)[a] = 0; |
251 | 251 |
} |
252 | 252 |
|
253 | 253 |
int bucket_num = 0; |
254 | 254 |
std::vector<Node> queue(_node_num); |
255 | 255 |
int qfirst = 0, qlast = 0, qsep = 0; |
256 | 256 |
|
257 | 257 |
{ |
258 | 258 |
typename Digraph::template NodeMap<bool> reached(_graph, false); |
259 | 259 |
|
260 | 260 |
reached[_source] = true; |
261 | 261 |
bool first_set = true; |
262 | 262 |
|
263 | 263 |
for (NodeIt t(_graph); t != INVALID; ++t) { |
264 | 264 |
if (reached[t]) continue; |
265 | 265 |
_sets.push_front(std::list<int>()); |
266 | 266 |
|
267 | 267 |
queue[qlast++] = t; |
268 | 268 |
reached[t] = true; |
269 | 269 |
|
270 | 270 |
while (qfirst != qlast) { |
271 | 271 |
if (qsep == qfirst) { |
272 | 272 |
++bucket_num; |
273 | 273 |
_sets.front().push_front(bucket_num); |
274 | 274 |
_dormant[bucket_num] = !first_set; |
275 | 275 |
_first[bucket_num] = _last[bucket_num] = INVALID; |
276 | 276 |
qsep = qlast; |
277 | 277 |
} |
278 | 278 |
|
279 | 279 |
Node n = queue[qfirst++]; |
280 | 280 |
addItem(n, bucket_num); |
281 | 281 |
|
282 | 282 |
for (InArcIt a(_graph, n); a != INVALID; ++a) { |
283 | 283 |
Node u = _graph.source(a); |
284 | 284 |
if (!reached[u] && _tolerance.positive((*_capacity)[a])) { |
285 | 285 |
reached[u] = true; |
286 | 286 |
queue[qlast++] = u; |
287 | 287 |
} |
288 | 288 |
} |
289 | 289 |
} |
290 | 290 |
first_set = false; |
291 | 291 |
} |
292 | 292 |
|
293 | 293 |
++bucket_num; |
294 | 294 |
(*_bucket)[_source] = 0; |
295 | 295 |
_dormant[0] = true; |
296 | 296 |
} |
297 | 297 |
(*_source_set)[_source] = true; |
298 | 298 |
|
299 | 299 |
Node target = _last[_sets.back().back()]; |
300 | 300 |
{ |
301 | 301 |
for (OutArcIt a(_graph, _source); a != INVALID; ++a) { |
302 | 302 |
if (_tolerance.positive((*_capacity)[a])) { |
303 | 303 |
Node u = _graph.target(a); |
304 | 304 |
(*_flow)[a] = (*_capacity)[a]; |
305 | 305 |
(*_excess)[u] += (*_capacity)[a]; |
306 | 306 |
if (!(*_active)[u] && u != _source) { |
307 | 307 |
activate(u); |
308 | 308 |
} |
309 | 309 |
} |
310 | 310 |
} |
311 | 311 |
|
312 | 312 |
if ((*_active)[target]) { |
313 | 313 |
deactivate(target); |
314 | 314 |
} |
315 | 315 |
|
316 | 316 |
_highest = _sets.back().begin(); |
317 | 317 |
while (_highest != _sets.back().end() && |
318 | 318 |
!(*_active)[_first[*_highest]]) { |
319 | 319 |
++_highest; |
320 | 320 |
} |
321 | 321 |
} |
322 | 322 |
|
323 | 323 |
while (true) { |
324 | 324 |
while (_highest != _sets.back().end()) { |
325 | 325 |
Node n = _first[*_highest]; |
326 | 326 |
Value excess = (*_excess)[n]; |
327 | 327 |
int next_bucket = _node_num; |
328 | 328 |
|
329 | 329 |
int under_bucket; |
330 | 330 |
if (++std::list<int>::iterator(_highest) == _sets.back().end()) { |
331 | 331 |
under_bucket = -1; |
332 | 332 |
} else { |
333 | 333 |
under_bucket = *(++std::list<int>::iterator(_highest)); |
334 | 334 |
} |
335 | 335 |
|
336 | 336 |
for (OutArcIt a(_graph, n); a != INVALID; ++a) { |
337 | 337 |
Node v = _graph.target(a); |
338 | 338 |
if (_dormant[(*_bucket)[v]]) continue; |
339 | 339 |
Value rem = (*_capacity)[a] - (*_flow)[a]; |
340 | 340 |
if (!_tolerance.positive(rem)) continue; |
341 | 341 |
if ((*_bucket)[v] == under_bucket) { |
342 | 342 |
if (!(*_active)[v] && v != target) { |
343 | 343 |
activate(v); |
344 | 344 |
} |
345 | 345 |
if (!_tolerance.less(rem, excess)) { |
346 | 346 |
(*_flow)[a] += excess; |
347 | 347 |
(*_excess)[v] += excess; |
348 | 348 |
excess = 0; |
349 | 349 |
goto no_more_push; |
350 | 350 |
} else { |
351 | 351 |
excess -= rem; |
352 | 352 |
(*_excess)[v] += rem; |
353 | 353 |
(*_flow)[a] = (*_capacity)[a]; |
354 | 354 |
} |
355 | 355 |
} else if (next_bucket > (*_bucket)[v]) { |
356 | 356 |
next_bucket = (*_bucket)[v]; |
357 | 357 |
} |
358 | 358 |
} |
359 | 359 |
|
360 | 360 |
for (InArcIt a(_graph, n); a != INVALID; ++a) { |
361 | 361 |
Node v = _graph.source(a); |
362 | 362 |
if (_dormant[(*_bucket)[v]]) continue; |
363 | 363 |
Value rem = (*_flow)[a]; |
364 | 364 |
if (!_tolerance.positive(rem)) continue; |
365 | 365 |
if ((*_bucket)[v] == under_bucket) { |
366 | 366 |
if (!(*_active)[v] && v != target) { |
367 | 367 |
activate(v); |
368 | 368 |
} |
369 | 369 |
if (!_tolerance.less(rem, excess)) { |
370 | 370 |
(*_flow)[a] -= excess; |
371 | 371 |
(*_excess)[v] += excess; |
372 | 372 |
excess = 0; |
373 | 373 |
goto no_more_push; |
374 | 374 |
} else { |
375 | 375 |
excess -= rem; |
376 | 376 |
(*_excess)[v] += rem; |
377 | 377 |
(*_flow)[a] = 0; |
378 | 378 |
} |
379 | 379 |
} else if (next_bucket > (*_bucket)[v]) { |
380 | 380 |
next_bucket = (*_bucket)[v]; |
381 | 381 |
} |
382 | 382 |
} |
383 | 383 |
|
384 | 384 |
no_more_push: |
385 | 385 |
|
386 | 386 |
(*_excess)[n] = excess; |
387 | 387 |
|
388 | 388 |
if (excess != 0) { |
389 | 389 |
if ((*_next)[n] == INVALID) { |
390 | 390 |
typename std::list<std::list<int> >::iterator new_set = |
391 | 391 |
_sets.insert(--_sets.end(), std::list<int>()); |
392 | 392 |
new_set->splice(new_set->end(), _sets.back(), |
393 | 393 |
_sets.back().begin(), ++_highest); |
394 | 394 |
for (std::list<int>::iterator it = new_set->begin(); |
395 | 395 |
it != new_set->end(); ++it) { |
396 | 396 |
_dormant[*it] = true; |
397 | 397 |
} |
398 | 398 |
while (_highest != _sets.back().end() && |
399 | 399 |
!(*_active)[_first[*_highest]]) { |
400 | 400 |
++_highest; |
401 | 401 |
} |
402 | 402 |
} else if (next_bucket == _node_num) { |
403 | 403 |
_first[(*_bucket)[n]] = (*_next)[n]; |
404 | 404 |
(*_prev)[(*_next)[n]] = INVALID; |
405 | 405 |
|
406 | 406 |
std::list<std::list<int> >::iterator new_set = |
407 | 407 |
_sets.insert(--_sets.end(), std::list<int>()); |
408 | 408 |
|
409 | 409 |
new_set->push_front(bucket_num); |
410 | 410 |
(*_bucket)[n] = bucket_num; |
411 | 411 |
_first[bucket_num] = _last[bucket_num] = n; |
412 | 412 |
(*_next)[n] = INVALID; |
413 | 413 |
(*_prev)[n] = INVALID; |
414 | 414 |
_dormant[bucket_num] = true; |
415 | 415 |
++bucket_num; |
416 | 416 |
|
417 | 417 |
while (_highest != _sets.back().end() && |
418 | 418 |
!(*_active)[_first[*_highest]]) { |
419 | 419 |
++_highest; |
420 | 420 |
} |
421 | 421 |
} else { |
422 | 422 |
_first[*_highest] = (*_next)[n]; |
423 | 423 |
(*_prev)[(*_next)[n]] = INVALID; |
424 | 424 |
|
425 | 425 |
while (next_bucket != *_highest) { |
426 | 426 |
--_highest; |
427 | 427 |
} |
428 | 428 |
|
429 | 429 |
if (_highest == _sets.back().begin()) { |
430 | 430 |
_sets.back().push_front(bucket_num); |
431 | 431 |
_dormant[bucket_num] = false; |
432 | 432 |
_first[bucket_num] = _last[bucket_num] = INVALID; |
433 | 433 |
++bucket_num; |
434 | 434 |
} |
435 | 435 |
--_highest; |
436 | 436 |
|
437 | 437 |
(*_bucket)[n] = *_highest; |
438 | 438 |
(*_next)[n] = _first[*_highest]; |
439 | 439 |
if (_first[*_highest] != INVALID) { |
440 | 440 |
(*_prev)[_first[*_highest]] = n; |
441 | 441 |
} else { |
... | ... |
@@ -531,475 +531,485 @@ |
531 | 531 |
} |
532 | 532 |
|
533 | 533 |
_highest = _sets.back().begin(); |
534 | 534 |
while (_highest != _sets.back().end() && |
535 | 535 |
!(*_active)[_first[*_highest]]) { |
536 | 536 |
++_highest; |
537 | 537 |
} |
538 | 538 |
} |
539 | 539 |
} |
540 | 540 |
} |
541 | 541 |
|
542 | 542 |
    // Second phase: find a minimum cut with the source on the sink-side.
    // Mirror image of findMinCutOut(): arc directions are treated in
    // reverse (In/Out arc iterators and flow updates are swapped).
    void findMinCutIn() {

      // Reset excesses, source-set membership and flow.
      for (NodeIt n(_graph); n != INVALID; ++n) {
        (*_excess)[n] = 0;
        (*_source_set)[n] = false;
      }

      for (ArcIt a(_graph); a != INVALID; ++a) {
        (*_flow)[a] = 0;
      }

      int bucket_num = 0;
      std::vector<Node> queue(_node_num);
      int qfirst = 0, qlast = 0, qsep = 0;

      // Build the initial buckets with a BFS along outgoing arcs of
      // positive capacity: each BFS level becomes one bucket.
      // Components not containing the source's component start dormant.
      {
        typename Digraph::template NodeMap<bool> reached(_graph, false);

        reached[_source] = true;

        bool first_set = true;

        for (NodeIt t(_graph); t != INVALID; ++t) {
          if (reached[t]) continue;
          _sets.push_front(std::list<int>());

          queue[qlast++] = t;
          reached[t] = true;

          while (qfirst != qlast) {
            // qsep marks the end of the current BFS level; crossing it
            // opens a new bucket.
            if (qsep == qfirst) {
              ++bucket_num;
              _sets.front().push_front(bucket_num);
              _dormant[bucket_num] = !first_set;
              _first[bucket_num] = _last[bucket_num] = INVALID;
              qsep = qlast;
            }

            Node n = queue[qfirst++];
            addItem(n, bucket_num);

            for (OutArcIt a(_graph, n); a != INVALID; ++a) {
              Node u = _graph.target(a);
              if (!reached[u] && _tolerance.positive((*_capacity)[a])) {
                reached[u] = true;
                queue[qlast++] = u;
              }
            }
          }
          first_set = false;
        }

        // Bucket 0 is reserved for the source and is permanently dormant.
        ++bucket_num;
        (*_bucket)[_source] = 0;
        _dormant[0] = true;
      }
      (*_source_set)[_source] = true;

      // Initial sink: the last node of the lowest bucket of the awake set.
      Node target = _last[_sets.back().back()];
      {
        // Saturate all arcs entering the source (reverse orientation)
        // and activate the nodes that receive excess.
        for (InArcIt a(_graph, _source); a != INVALID; ++a) {
          if (_tolerance.positive((*_capacity)[a])) {
            Node u = _graph.source(a);
            (*_flow)[a] = (*_capacity)[a];
            (*_excess)[u] += (*_capacity)[a];
            if (!(*_active)[u] && u != _source) {
              activate(u);
            }
          }
        }
        if ((*_active)[target]) {
          deactivate(target);
        }

        // Position _highest at the first bucket holding an active node.
        _highest = _sets.back().begin();
        while (_highest != _sets.back().end() &&
               !(*_active)[_first[*_highest]]) {
          ++_highest;
        }
      }


      // Main loop: one iteration per sink; ends when all nodes have been
      // moved to the source side.
      while (true) {
        // Push/relabel until no active node remains in the awake set.
        while (_highest != _sets.back().end()) {
          Node n = _first[*_highest];
          Value excess = (*_excess)[n];
          int next_bucket = _node_num;

          // under_bucket: the bucket directly below the current one
          // (-1 if the current one is the lowest).
          int under_bucket;
          if (++std::list<int>::iterator(_highest) == _sets.back().end()) {
            under_bucket = -1;
          } else {
            under_bucket = *(++std::list<int>::iterator(_highest));
          }

          // Push on residual incoming arcs (reverse orientation:
          // these play the role of forward arcs).
          for (InArcIt a(_graph, n); a != INVALID; ++a) {
            Node v = _graph.source(a);
            if (_dormant[(*_bucket)[v]]) continue;
            Value rem = (*_capacity)[a] - (*_flow)[a];
            if (!_tolerance.positive(rem)) continue;
            if ((*_bucket)[v] == under_bucket) {
              if (!(*_active)[v] && v != target) {
                activate(v);
              }
              if (!_tolerance.less(rem, excess)) {
                // Non-saturating push: all excess fits on this arc.
                (*_flow)[a] += excess;
                (*_excess)[v] += excess;
                excess = 0;
                goto no_more_push;
              } else {
                // Saturating push.
                excess -= rem;
                (*_excess)[v] += rem;
                (*_flow)[a] = (*_capacity)[a];
              }
            } else if (next_bucket > (*_bucket)[v]) {
              next_bucket = (*_bucket)[v];
            }
          }

          // Push back on arcs carrying flow out of n (residual
          // backward arcs in the reverse orientation).
          for (OutArcIt a(_graph, n); a != INVALID; ++a) {
            Node v = _graph.target(a);
            if (_dormant[(*_bucket)[v]]) continue;
            Value rem = (*_flow)[a];
            if (!_tolerance.positive(rem)) continue;
            if ((*_bucket)[v] == under_bucket) {
              if (!(*_active)[v] && v != target) {
                activate(v);
              }
              if (!_tolerance.less(rem, excess)) {
                (*_flow)[a] -= excess;
                (*_excess)[v] += excess;
                excess = 0;
                goto no_more_push;
              } else {
                excess -= rem;
                (*_excess)[v] += rem;
                (*_flow)[a] = 0;
              }
            } else if (next_bucket == _node_num ? false : false) {
              // unreachable placeholder
            } else if (next_bucket > (*_bucket)[v]) {
              next_bucket = (*_bucket)[v];
            }
          }

        no_more_push:

          (*_excess)[n] = excess;

          if (excess != 0) {
            // n still has excess: relabel.
            if ((*_next)[n] == INVALID) {
              // n's whole bucket (and everything above) becomes dormant.
              typename std::list<std::list<int> >::iterator new_set =
                _sets.insert(--_sets.end(), std::list<int>());
              new_set->splice(new_set->end(), _sets.back(),
                              _sets.back().begin(), ++_highest);
              for (std::list<int>::iterator it = new_set->begin();
                   it != new_set->end(); ++it) {
                _dormant[*it] = true;
              }
              while (_highest != _sets.back().end() &&
                     !(*_active)[_first[*_highest]]) {
                ++_highest;
              }
            } else if (next_bucket == _node_num) {
              // No admissible residual arc: n moves alone into a fresh
              // dormant singleton bucket.
              _first[(*_bucket)[n]] = (*_next)[n];
              (*_prev)[(*_next)[n]] = INVALID;

              std::list<std::list<int> >::iterator new_set =
                _sets.insert(--_sets.end(), std::list<int>());

              new_set->push_front(bucket_num);
              (*_bucket)[n] = bucket_num;
              _first[bucket_num] = _last[bucket_num] = n;
              (*_next)[n] = INVALID;
              (*_prev)[n] = INVALID;
              _dormant[bucket_num] = true;
              ++bucket_num;

              while (_highest != _sets.back().end() &&
                     !(*_active)[_first[*_highest]]) {
                ++_highest;
              }
            } else {
              // Lower n to just above the lowest bucket it can still
              // push into (next_bucket).
              _first[*_highest] = (*_next)[n];
              (*_prev)[(*_next)[n]] = INVALID;

              while (next_bucket != *_highest) {
                --_highest;
              }
              if (_highest == _sets.back().begin()) {
                // Need a new bucket above the lowest one.
                _sets.back().push_front(bucket_num);
                _dormant[bucket_num] = false;
                _first[bucket_num] = _last[bucket_num] = INVALID;
                ++bucket_num;
              }
              --_highest;

              // Push n to the front of its new bucket.
              (*_bucket)[n] = *_highest;
              (*_next)[n] = _first[*_highest];
              if (_first[*_highest] != INVALID) {
                (*_prev)[_first[*_highest]] = n;
              } else {
                _last[*_highest] = n;
              }
              _first[*_highest] = n;
            }
          } else {

            // n's excess is exhausted: deactivate and advance _highest.
            deactivate(n);
            if (!(*_active)[_first[*_highest]]) {
              ++_highest;
              if (_highest != _sets.back().end() &&
                  !(*_active)[_first[*_highest]]) {
                _highest = _sets.back().end();
              }
            }
          }
        }

        // The excess gathered at the sink is the value of the cut
        // separating the current source side from the sink; record it
        // if it improves on the best cut found so far.
        if ((*_excess)[target] < _min_cut) {
          _min_cut = (*_excess)[target];
          for (NodeIt i(_graph); i != INVALID; ++i) {
            (*_min_cut_map)[i] = false;
          }
          for (std::list<int>::iterator it = _sets.back().begin();
               it != _sets.back().end(); ++it) {
            Node n = _first[*it];
            while (n != INVALID) {
              (*_min_cut_map)[n] = true;
              n = (*_next)[n];
            }
          }
        }

        // Move the sink to the source side and select a new sink.
        {
          Node new_target;
          if ((*_prev)[target] != INVALID || (*_next)[target] != INVALID) {
            // target's bucket has other nodes: unlink target and pick
            // a neighbour as the new sink.
            if ((*_next)[target] == INVALID) {
              _last[(*_bucket)[target]] = (*_prev)[target];
              new_target = (*_prev)[target];
            } else {
              (*_prev)[(*_next)[target]] = (*_prev)[target];
              new_target = (*_next)[target];
            }
            if ((*_prev)[target] == INVALID) {
              _first[(*_bucket)[target]] = (*_next)[target];
            } else {
              (*_next)[(*_prev)[target]] = (*_next)[target];
            }
          } else {
            // target was alone in its bucket: drop the bucket, waking
            // the next dormant set (or terminating) if the awake set
            // becomes empty.
            _sets.back().pop_back();
            if (_sets.back().empty()) {
              _sets.pop_back();
              if (_sets.empty())
                break;
              for (std::list<int>::iterator it = _sets.back().begin();
                   it != _sets.back().end(); ++it) {
                _dormant[*it] = false;
              }
            }
            new_target = _last[_sets.back().back()];
          }

          (*_bucket)[target] = 0;

          // Saturate the residual arcs around the old sink so its
          // neighbours absorb the remaining capacity.
          (*_source_set)[target] = true;
          for (InArcIt a(_graph, target); a != INVALID; ++a) {
            Value rem = (*_capacity)[a] - (*_flow)[a];
            if (!_tolerance.positive(rem)) continue;
            Node v = _graph.source(a);
            if (!(*_active)[v] && !(*_source_set)[v]) {
              activate(v);
            }
            (*_excess)[v] += rem;
            (*_flow)[a] = (*_capacity)[a];
          }

          for (OutArcIt a(_graph, target); a != INVALID; ++a) {
            Value rem = (*_flow)[a];
            if (!_tolerance.positive(rem)) continue;
            Node v = _graph.target(a);
            if (!(*_active)[v] && !(*_source_set)[v]) {
              activate(v);
            }
            (*_excess)[v] += rem;
            (*_flow)[a] = 0;
          }

          target = new_target;
          if ((*_active)[target]) {
            deactivate(target);
          }

          // Re-seek the first bucket holding an active node.
          _highest = _sets.back().begin();
          while (_highest != _sets.back().end() &&
                 !(*_active)[_first[*_highest]]) {
            ++_highest;
          }
        }
      }
    }
841 | 841 |
|
842 | 842 |
public: |
843 | 843 |
|
844 | 844 |
/// \name Execution Control |
845 | 845 |
/// The simplest way to execute the algorithm is to use |
846 | 846 |
/// one of the member functions called \ref run(). |
847 | 847 |
/// \n |
848 | 848 |
/// If you need better control on the execution, |
849 | 849 |
/// you have to call one of the \ref init() functions first, then |
850 | 850 |
/// \ref calculateOut() and/or \ref calculateIn(). |
851 | 851 |
|
852 | 852 |
/// @{ |
853 | 853 |
|
854 | 854 |
    /// \brief Initialize the internal data structures.
    ///
    /// This function initializes the internal data structures. It creates
    /// the maps and some bucket structures for the algorithm.
    /// The first node is used as the source node for the push-relabel
    /// algorithm.
    void init() {
      // Delegate to the general overload with the digraph's first node.
      init(NodeIt(_graph));
    }
863 | 863 |
|
864 | 864 |
    /// \brief Initialize the internal data structures.
    ///
    /// This function initializes the internal data structures. It creates
    /// the maps and some bucket structures for the algorithm.
    /// The given node is used as the source node for the push-relabel
    /// algorithm.
    /// \param source The node used as the source of the algorithm.
    void init(const Node& source) {
      _source = source;

      _node_num = countNodes(_graph);

      // One potential bucket per node.
      _first.resize(_node_num);
      _last.resize(_node_num);

      _dormant.resize(_node_num);

      // Node/arc maps are allocated lazily so that repeated init()
      // calls reuse the existing allocations.
      if (!_flow) {
        _flow = new FlowMap(_graph);
      }
      if (!_next) {
        _next = new typename Digraph::template NodeMap<Node>(_graph);
      }
      if (!_prev) {
        _prev = new typename Digraph::template NodeMap<Node>(_graph);
      }
      if (!_active) {
        _active = new typename Digraph::template NodeMap<bool>(_graph);
      }
      if (!_bucket) {
        _bucket = new typename Digraph::template NodeMap<int>(_graph);
      }
      if (!_excess) {
        _excess = new ExcessMap(_graph);
      }
      if (!_source_set) {
        _source_set = new SourceSetMap(_graph);
      }
      if (!_min_cut_map) {
        _min_cut_map = new MinCutMap(_graph);
      }

      // Best cut value found so far; improved by the calculate*() phases.
      _min_cut = std::numeric_limits<Value>::max();
    }
907 | 907 |
|
908 | 908 |
|
909 | 909 |
    /// \brief Calculate a minimum cut with \f$ source \f$ on the
    /// source-side.
    ///
    /// This function calculates a minimum cut with \f$ source \f$ on the
    /// source-side (i.e. a set \f$ X\subsetneq V \f$ with
    /// \f$ source \in X \f$ and minimal outgoing capacity).
    /// It updates the stored cut if (and only if) the newly found one
    /// is better.
    ///
    /// \pre \ref init() must be called before using this function.
    void calculateOut() {
      findMinCutOut();
    }
920 | 922 |
|
921 | 923 |
    /// \brief Calculate a minimum cut with \f$ source \f$ on the
    /// sink-side.
    ///
    /// This function calculates a minimum cut with \f$ source \f$ on the
    /// sink-side (i.e. a set \f$ X\subsetneq V \f$ with
    /// \f$ source \notin X \f$ and minimal outgoing capacity).
    /// It updates the stored cut if (and only if) the newly found one
    /// is better.
    ///
    /// \pre \ref init() must be called before using this function.
    void calculateIn() {
      findMinCutIn();
    }
932 | 936 |
|
933 | 937 |
|
934 | 938 |
    /// \brief Run the algorithm.
    ///
    /// This function runs the algorithm. It chooses a source node,
    /// then calls \ref init(), \ref calculateOut()
    /// and \ref calculateIn().
    void run() {
      init();
      calculateOut();
      calculateIn();
    }
944 | 948 |
|
945 | 949 |
    /// \brief Run the algorithm.
    ///
    /// This function runs the algorithm. It calls \ref init(),
    /// \ref calculateOut() and \ref calculateIn() with the given
    /// source node.
    /// \param s The node used as the source of the algorithm.
    void run(const Node& s) {
      init(s);
      calculateOut();
      calculateIn();
    }
955 | 959 |
|
956 | 960 |
/// @} |
957 | 961 |
|
958 | 962 |
/// \name Query Functions |
959 | 963 |
/// The result of the %HaoOrlin algorithm |
960 | 964 |
/// can be obtained using these functions.\n |
961 | 965 |
/// \ref run(), \ref calculateOut() or \ref calculateIn() |
962 | 966 |
/// should be called before using them. |
963 | 967 |
|
964 | 968 |
/// @{ |
965 | 969 |
|
966 | 970 |
    /// \brief Return the value of the minimum cut.
    ///
    /// This function returns the value of the best cut found by the
    /// previously called \ref run(), \ref calculateOut() or \ref
    /// calculateIn().
    ///
    /// \pre \ref run(), \ref calculateOut() or \ref calculateIn()
    /// must be called before using this function.
    Value minCutValue() const {
      return _min_cut;
    }
975 | 981 |
|
976 | 982 |
|
977 | 983 |
    /// \brief Return a minimum cut.
    ///
    /// This function gives the best cut found by the
    /// previously called \ref run(), \ref calculateOut() or \ref
    /// calculateIn().
    ///
    /// It sets \c cutMap to the characteristic vector of the found
    /// minimum value cut - a non-empty set \f$ X\subsetneq V \f$
    /// of minimum outgoing capacity (i.e. \c cutMap will be \c true exactly
    /// for the nodes of \f$ X \f$).
    ///
    /// \param cutMap A \ref concepts::WriteMap "writable" node map with
    /// \c bool (or convertible) value type.
    ///
    /// \return The value of the minimum cut.
    ///
    /// \pre \ref run(), \ref calculateOut() or \ref calculateIn()
    /// must be called before using this function.
    template <typename CutMap>
    Value minCutMap(CutMap& cutMap) const {
      for (NodeIt it(_graph); it != INVALID; ++it) {
        cutMap.set(it, (*_min_cut_map)[it]);
      }
      return _min_cut;
    }
998 | 1008 |
|
999 | 1009 |
/// @} |
1000 | 1010 |
|
1001 | 1011 |
}; //class HaoOrlin |
1002 | 1012 |
|
1003 | 1013 |
} //namespace lemon |
1004 | 1014 |
|
1005 | 1015 |
#endif //LEMON_HAO_ORLIN_H |
0 comments (0 inline)