/* -*- mode: C++; indent-tabs-mode: nil; -*-
 *
 * This file is a part of LEMON, a generic C++ optimization library.
 *
 * Copyright (C) 2003-2010
 * Egervary Jeno Kombinatorikus Optimalizalasi Kutatocsoport
 * (Egervary Research Group on Combinatorial Optimization, EGRES).
 *
 * Permission to use, modify and distribute this software is granted
 * provided that this copyright notice appears in all copies. For
 * precise terms see the accompanying LICENSE file.
 *
 * This software is provided "AS IS" with no warranty of any kind,
 * express or implied, and with no claim as to its suitability for any
 * purpose.
 *
 */

#ifndef LEMON_HAO_ORLIN_H
#define LEMON_HAO_ORLIN_H
#include <vector>
#include <list>
#include <limits>

#include <lemon/maps.h>
#include <lemon/core.h>
#include <lemon/tolerance.h>
/// \brief Implementation of the Hao-Orlin algorithm.
///
/// Implementation of the Hao-Orlin algorithm for finding a minimum cut
/// in a digraph.
  /// \brief Hao-Orlin algorithm for finding a minimum cut in a digraph.
  ///
  /// This class implements the Hao-Orlin algorithm for finding a minimum
  /// value cut in a directed graph \f$D=(V,A)\f$.
  /// It takes a fixed node \f$ source \in V \f$ and
  /// consists of two phases: in the first phase it determines a
  /// minimum cut with \f$ source \f$ on the source-side (i.e. a set
  /// \f$ X\subsetneq V \f$ with \f$ source \in X \f$ and minimal outgoing
  /// capacity) and in the second phase it determines a minimum cut
  /// with \f$ source \f$ on the sink-side (i.e. a set
  /// \f$ X\subsetneq V \f$ with \f$ source \notin X \f$ and minimal outgoing
  /// capacity). Obviously, the smaller of these two cuts will be a
  /// minimum cut of \f$ D \f$. The algorithm is a modified
  /// preflow push-relabel algorithm. Our implementation calculates
  /// the minimum cut in \f$ O(n^2\sqrt{m}) \f$ time (we use the
  /// highest-label rule), or in \f$O(nm)\f$ for unit capacities. A notable
  /// use of this algorithm is testing network reliability.
  ///
  /// For an undirected graph you can run just the first phase of the
  /// algorithm or you can use the algorithm of Nagamochi and Ibaraki,
  /// which solves the undirected problem in \f$ O(nm + n^2 \log n) \f$
  /// time. It is implemented in the NagamochiIbaraki algorithm class.
  ///
  /// \tparam GR The type of the digraph the algorithm runs on.
  /// \tparam CAP The type of the arc map containing the capacities,
  /// which can be any numeric type. The default map type is
  /// \ref concepts::Digraph::ArcMap "GR::ArcMap<int>".
  /// \tparam TOL Tolerance class for handling inexact computations. The
  /// default tolerance type is \ref Tolerance "Tolerance<CAP::Value>".
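  ///
  /// A minimal usage sketch could look like the following (assuming that
  /// \c g is a \c ListDigraph and \c cap is an \c int arc map on it that
  /// have been built elsewhere):
  /// \code
  ///   HaoOrlin<ListDigraph> ho(g, cap);
  ///   ho.run();
  ///   std::cout << "minimum cut value: " << ho.minCutValue() << std::endl;
  /// \endcode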
#ifdef DOXYGEN
  template <typename GR, typename CAP, typename TOL>
#else
  template <typename GR,
            typename CAP = typename GR::template ArcMap<int>,
            typename TOL = Tolerance<typename CAP::Value> >
#endif
    /// The digraph type of the algorithm
    typedef GR Digraph;
    /// The capacity map type of the algorithm
    typedef CAP CapacityMap;
    /// The tolerance type of the algorithm
    typedef TOL Tolerance;

    typedef typename CapacityMap::Value Value;
    TEMPLATE_DIGRAPH_TYPEDEFS(Digraph);

    const Digraph& _graph;
    const CapacityMap* _capacity;

    typedef typename Digraph::template ArcMap<Value> FlowMap;
    FlowMap* _flow;

    Node _source;
    int _node_num;

    // Bucketing structure
    std::vector<Node> _first, _last;
    typename Digraph::template NodeMap<Node>* _next;
    typename Digraph::template NodeMap<Node>* _prev;
    typename Digraph::template NodeMap<bool>* _active;
    typename Digraph::template NodeMap<int>* _bucket;

    std::vector<bool> _dormant;

    std::list<std::list<int> > _sets;
    std::list<int>::iterator _highest;

    typedef typename Digraph::template NodeMap<Value> ExcessMap;
    ExcessMap* _excess;

    typedef typename Digraph::template NodeMap<bool> SourceSetMap;
    SourceSetMap* _source_set;

    Value _min_cut;

    typedef typename Digraph::template NodeMap<bool> MinCutMap;
    MinCutMap* _min_cut_map;

    Tolerance _tolerance;
    /// \brief Constructor
    ///
    /// Constructor of the algorithm class.
    HaoOrlin(const Digraph& graph, const CapacityMap& capacity,
             const Tolerance& tolerance = Tolerance()) :
      _graph(graph), _capacity(&capacity), _flow(0), _source(),
      _node_num(), _first(), _last(), _next(0), _prev(0),
      _active(0), _bucket(0), _dormant(), _sets(), _highest(),
      _excess(0), _source_set(0), _min_cut(), _min_cut_map(0),
      _tolerance(tolerance) {}
    /// \brief Set the tolerance used by the algorithm.
    ///
    /// This function sets the tolerance object used by the algorithm.
    /// \return <tt>(*this)</tt>
    HaoOrlin& tolerance(const Tolerance& tolerance) {
      _tolerance = tolerance;
      return *this;
    }
    /// \brief Returns a const reference to the tolerance.
    ///
    /// This function returns a const reference to the tolerance object
    /// used by the algorithm.
    const Tolerance& tolerance() const {
      return _tolerance;
    }
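
    // Marks node i as active and moves it to the front of its bucket list,
    // so that the active nodes always precede the inactive ones in a bucket.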
    void activate(const Node& i) {
      (*_active)[i] = true;

      int bucket = (*_bucket)[i];

      if ((*_prev)[i] == INVALID || (*_active)[(*_prev)[i]]) return;
      //unlace
      (*_next)[(*_prev)[i]] = (*_next)[i];
      if ((*_next)[i] != INVALID) {
        (*_prev)[(*_next)[i]] = (*_prev)[i];
      } else {
        _last[bucket] = (*_prev)[i];
      }
      //lace
      (*_next)[i] = _first[bucket];
      (*_prev)[_first[bucket]] = i;
      (*_prev)[i] = INVALID;
      _first[bucket] = i;
    }
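
    // Marks node i as inactive and moves it to the back of its bucket list,
    // keeping the active nodes at the front of the bucket.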
    void deactivate(const Node& i) {
      (*_active)[i] = false;
      int bucket = (*_bucket)[i];

      if ((*_next)[i] == INVALID || !(*_active)[(*_next)[i]]) return;

      //unlace
      (*_prev)[(*_next)[i]] = (*_prev)[i];
      if ((*_prev)[i] != INVALID) {
        (*_next)[(*_prev)[i]] = (*_next)[i];
      } else {
        _first[bucket] = (*_next)[i];
      }
      //lace
      (*_prev)[i] = _last[bucket];
      (*_next)[_last[bucket]] = i;
      (*_next)[i] = INVALID;
      _last[bucket] = i;
    }
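
    // Appends node i to the end of the list of the given bucket.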
    void addItem(const Node& i, int bucket) {
      (*_bucket)[i] = bucket;
      if (_last[bucket] != INVALID) {
        (*_prev)[i] = _last[bucket];
        (*_next)[_last[bucket]] = i;
        (*_next)[i] = INVALID;
        _last[bucket] = i;
      } else {
        (*_prev)[i] = INVALID;
        _first[bucket] = i;
        (*_next)[i] = INVALID;
        _last[bucket] = i;
      }
    }
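
    // First phase: computes a minimum cut with the source node on the
    // source-side, using a modified push-relabel scheme with the
    // highest-label selection rule.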
    void findMinCutOut() {
      for (NodeIt n(_graph); n != INVALID; ++n) {
        (*_source_set)[n] = false;
      for (ArcIt a(_graph); a != INVALID; ++a) {
      std::vector<Node> queue(_node_num);
      int qfirst = 0, qlast = 0, qsep = 0;
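
      // Build the initial buckets with a breadth-first search on the
      // reverse arcs; each BFS level becomes one bucket.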
        typename Digraph::template NodeMap<bool> reached(_graph, false);
        reached[_source] = true;
        bool first_set = true;
        for (NodeIt t(_graph); t != INVALID; ++t) {
          if (reached[t]) continue;
          _sets.push_front(std::list<int>());
          while (qfirst != qlast) {
            if (qsep == qfirst) {
              _sets.front().push_front(bucket_num);
              _dormant[bucket_num] = !first_set;
              _first[bucket_num] = _last[bucket_num] = INVALID;
            Node n = queue[qfirst++];
            addItem(n, bucket_num);
            for (InArcIt a(_graph, n); a != INVALID; ++a) {
              Node u = _graph.source(a);
              if (!reached[u] && _tolerance.positive((*_capacity)[a])) {
        (*_bucket)[_source] = 0;
      (*_source_set)[_source] = true;
      Node target = _last[_sets.back().back()];
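
      // Saturate every out-arc of the source and activate the nodes that
      // receive excess; the node selected as target acts as the current sink.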
        for (OutArcIt a(_graph, _source); a != INVALID; ++a) {
          if (_tolerance.positive((*_capacity)[a])) {
            Node u = _graph.target(a);
            (*_flow)[a] = (*_capacity)[a];
            (*_excess)[u] += (*_capacity)[a];
            if (!(*_active)[u] && u != _source) {
        if ((*_active)[target]) {
        _highest = _sets.back().begin();
        while (_highest != _sets.back().end() &&
               !(*_active)[_first[*_highest]]) {
        while (_highest != _sets.back().end()) {
          Node n = _first[*_highest];
          Value excess = (*_excess)[n];
          int next_bucket = _node_num;
          if (++std::list<int>::iterator(_highest) == _sets.back().end()) {
            under_bucket = *(++std::list<int>::iterator(_highest));
          for (OutArcIt a(_graph, n); a != INVALID; ++a) {
            Node v = _graph.target(a);
            if (_dormant[(*_bucket)[v]]) continue;
            Value rem = (*_capacity)[a] - (*_flow)[a];
            if (!_tolerance.positive(rem)) continue;
            if ((*_bucket)[v] == under_bucket) {
              if (!(*_active)[v] && v != target) {
              if (!_tolerance.less(rem, excess)) {
                (*_flow)[a] += excess;
                (*_excess)[v] += excess;
                (*_excess)[v] += rem;
                (*_flow)[a] = (*_capacity)[a];
            } else if (next_bucket > (*_bucket)[v]) {
              next_bucket = (*_bucket)[v];
          for (InArcIt a(_graph, n); a != INVALID; ++a) {
            Node v = _graph.source(a);
            if (_dormant[(*_bucket)[v]]) continue;
            Value rem = (*_flow)[a];
            if (!_tolerance.positive(rem)) continue;
            if ((*_bucket)[v] == under_bucket) {
              if (!(*_active)[v] && v != target) {
              if (!_tolerance.less(rem, excess)) {
                (*_flow)[a] -= excess;
                (*_excess)[v] += excess;
                (*_excess)[v] += rem;
            } else if (next_bucket > (*_bucket)[v]) {
              next_bucket = (*_bucket)[v];
          (*_excess)[n] = excess;
            if ((*_next)[n] == INVALID) {
              typename std::list<std::list<int> >::iterator new_set =
                _sets.insert(--_sets.end(), std::list<int>());
              new_set->splice(new_set->end(), _sets.back(),
                              _sets.back().begin(), ++_highest);
              for (std::list<int>::iterator it = new_set->begin();
                   it != new_set->end(); ++it) {
                _dormant[*it] = true;
              while (_highest != _sets.back().end() &&
                     !(*_active)[_first[*_highest]]) {
            } else if (next_bucket == _node_num) {
              _first[(*_bucket)[n]] = (*_next)[n];
              (*_prev)[(*_next)[n]] = INVALID;
              std::list<std::list<int> >::iterator new_set =
                _sets.insert(--_sets.end(), std::list<int>());
              new_set->push_front(bucket_num);
              (*_bucket)[n] = bucket_num;
              _first[bucket_num] = _last[bucket_num] = n;
              (*_next)[n] = INVALID;
              (*_prev)[n] = INVALID;
              _dormant[bucket_num] = true;
              while (_highest != _sets.back().end() &&
                     !(*_active)[_first[*_highest]]) {
              _first[*_highest] = (*_next)[n];
              (*_prev)[(*_next)[n]] = INVALID;
              while (next_bucket != *_highest) {
              if (_highest == _sets.back().begin()) {
                _sets.back().push_front(bucket_num);
                _dormant[bucket_num] = false;
                _first[bucket_num] = _last[bucket_num] = INVALID;
              (*_bucket)[n] = *_highest;
              (*_next)[n] = _first[*_highest];
              if (_first[*_highest] != INVALID) {
                (*_prev)[_first[*_highest]] = n;
                _last[*_highest] = n;
              _first[*_highest] = n;
            if (!(*_active)[_first[*_highest]]) {
              if (_highest != _sets.back().end() &&
                  !(*_active)[_first[*_highest]]) {
                _highest = _sets.back().end();
        if ((*_excess)[target] < _min_cut) {
          _min_cut = (*_excess)[target];
          for (NodeIt i(_graph); i != INVALID; ++i) {
            (*_min_cut_map)[i] = true;
          for (std::list<int>::iterator it = _sets.back().begin();
               it != _sets.back().end(); ++it) {
            Node n = _first[*it];
            while (n != INVALID) {
              (*_min_cut_map)[n] = false;
          if ((*_prev)[target] != INVALID || (*_next)[target] != INVALID) {
            if ((*_next)[target] == INVALID) {
              _last[(*_bucket)[target]] = (*_prev)[target];
              new_target = (*_prev)[target];
              (*_prev)[(*_next)[target]] = (*_prev)[target];
              new_target = (*_next)[target];
            if ((*_prev)[target] == INVALID) {
              _first[(*_bucket)[target]] = (*_next)[target];
              (*_next)[(*_prev)[target]] = (*_next)[target];
            _sets.back().pop_back();
            if (_sets.back().empty()) {
              for (std::list<int>::iterator it = _sets.back().begin();
                   it != _sets.back().end(); ++it) {
                _dormant[*it] = false;
            new_target = _last[_sets.back().back()];
          (*_bucket)[target] = 0;
          (*_source_set)[target] = true;
          for (OutArcIt a(_graph, target); a != INVALID; ++a) {
            Value rem = (*_capacity)[a] - (*_flow)[a];
            if (!_tolerance.positive(rem)) continue;
            Node v = _graph.target(a);
            if (!(*_active)[v] && !(*_source_set)[v]) {
            (*_excess)[v] += rem;
            (*_flow)[a] = (*_capacity)[a];
          for (InArcIt a(_graph, target); a != INVALID; ++a) {
            Value rem = (*_flow)[a];
            if (!_tolerance.positive(rem)) continue;
            Node v = _graph.source(a);
            if (!(*_active)[v] && !(*_source_set)[v]) {
            (*_excess)[v] += rem;
          if ((*_active)[target]) {
          _highest = _sets.back().begin();
          while (_highest != _sets.back().end() &&
                 !(*_active)[_first[*_highest]]) {
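
    // Second phase: computes a minimum cut with the source node on the
    // sink-side; this is the mirror image of findMinCutOut(), working on
    // the reversed arcs.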
    void findMinCutIn() {
      for (NodeIt n(_graph); n != INVALID; ++n) {
        (*_source_set)[n] = false;
      for (ArcIt a(_graph); a != INVALID; ++a) {
      std::vector<Node> queue(_node_num);
      int qfirst = 0, qlast = 0, qsep = 0;
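
      // Build the initial buckets with a breadth-first search on the
      // forward arcs; each BFS level becomes one bucket.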
        typename Digraph::template NodeMap<bool> reached(_graph, false);
        reached[_source] = true;
        bool first_set = true;
        for (NodeIt t(_graph); t != INVALID; ++t) {
          if (reached[t]) continue;
          _sets.push_front(std::list<int>());
          while (qfirst != qlast) {
            if (qsep == qfirst) {
              _sets.front().push_front(bucket_num);
              _dormant[bucket_num] = !first_set;
              _first[bucket_num] = _last[bucket_num] = INVALID;
            Node n = queue[qfirst++];
            addItem(n, bucket_num);
            for (OutArcIt a(_graph, n); a != INVALID; ++a) {
              Node u = _graph.target(a);
              if (!reached[u] && _tolerance.positive((*_capacity)[a])) {
        (*_bucket)[_source] = 0;
      (*_source_set)[_source] = true;
      Node target = _last[_sets.back().back()];
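
      // Saturate every in-arc of the source and activate the nodes that
      // receive excess; the node selected as target acts as the current sink.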
        for (InArcIt a(_graph, _source); a != INVALID; ++a) {
          if (_tolerance.positive((*_capacity)[a])) {
            Node u = _graph.source(a);
            (*_flow)[a] = (*_capacity)[a];
            (*_excess)[u] += (*_capacity)[a];
            if (!(*_active)[u] && u != _source) {
        if ((*_active)[target]) {
        _highest = _sets.back().begin();
        while (_highest != _sets.back().end() &&
               !(*_active)[_first[*_highest]]) {
        while (_highest != _sets.back().end()) {
          Node n = _first[*_highest];
          Value excess = (*_excess)[n];
          int next_bucket = _node_num;
          if (++std::list<int>::iterator(_highest) == _sets.back().end()) {
            under_bucket = *(++std::list<int>::iterator(_highest));
          for (InArcIt a(_graph, n); a != INVALID; ++a) {
            Node v = _graph.source(a);
            if (_dormant[(*_bucket)[v]]) continue;
            Value rem = (*_capacity)[a] - (*_flow)[a];
            if (!_tolerance.positive(rem)) continue;
            if ((*_bucket)[v] == under_bucket) {
              if (!(*_active)[v] && v != target) {
              if (!_tolerance.less(rem, excess)) {
                (*_flow)[a] += excess;
                (*_excess)[v] += excess;
                (*_excess)[v] += rem;
                (*_flow)[a] = (*_capacity)[a];
            } else if (next_bucket > (*_bucket)[v]) {
              next_bucket = (*_bucket)[v];
          for (OutArcIt a(_graph, n); a != INVALID; ++a) {
            Node v = _graph.target(a);
            if (_dormant[(*_bucket)[v]]) continue;
            Value rem = (*_flow)[a];
            if (!_tolerance.positive(rem)) continue;
            if ((*_bucket)[v] == under_bucket) {
              if (!(*_active)[v] && v != target) {
              if (!_tolerance.less(rem, excess)) {
                (*_flow)[a] -= excess;
                (*_excess)[v] += excess;
                (*_excess)[v] += rem;
            } else if (next_bucket > (*_bucket)[v]) {
              next_bucket = (*_bucket)[v];
          (*_excess)[n] = excess;
            if ((*_next)[n] == INVALID) {
              typename std::list<std::list<int> >::iterator new_set =
                _sets.insert(--_sets.end(), std::list<int>());
              new_set->splice(new_set->end(), _sets.back(),
                              _sets.back().begin(), ++_highest);
              for (std::list<int>::iterator it = new_set->begin();
                   it != new_set->end(); ++it) {
                _dormant[*it] = true;
              while (_highest != _sets.back().end() &&
                     !(*_active)[_first[*_highest]]) {
            } else if (next_bucket == _node_num) {
              _first[(*_bucket)[n]] = (*_next)[n];
              (*_prev)[(*_next)[n]] = INVALID;
              std::list<std::list<int> >::iterator new_set =
                _sets.insert(--_sets.end(), std::list<int>());
              new_set->push_front(bucket_num);
              (*_bucket)[n] = bucket_num;
              _first[bucket_num] = _last[bucket_num] = n;
              (*_next)[n] = INVALID;
              (*_prev)[n] = INVALID;
              _dormant[bucket_num] = true;
              while (_highest != _sets.back().end() &&
                     !(*_active)[_first[*_highest]]) {
              _first[*_highest] = (*_next)[n];
              (*_prev)[(*_next)[n]] = INVALID;
              while (next_bucket != *_highest) {
              if (_highest == _sets.back().begin()) {
                _sets.back().push_front(bucket_num);
                _dormant[bucket_num] = false;
                _first[bucket_num] = _last[bucket_num] = INVALID;
              (*_bucket)[n] = *_highest;
              (*_next)[n] = _first[*_highest];
              if (_first[*_highest] != INVALID) {
                (*_prev)[_first[*_highest]] = n;
                _last[*_highest] = n;
              _first[*_highest] = n;
            if (!(*_active)[_first[*_highest]]) {
              if (_highest != _sets.back().end() &&
                  !(*_active)[_first[*_highest]]) {
                _highest = _sets.back().end();
        if ((*_excess)[target] < _min_cut) {
          _min_cut = (*_excess)[target];
          for (NodeIt i(_graph); i != INVALID; ++i) {
            (*_min_cut_map)[i] = false;
          for (std::list<int>::iterator it = _sets.back().begin();
               it != _sets.back().end(); ++it) {
            Node n = _first[*it];
            while (n != INVALID) {
              (*_min_cut_map)[n] = true;
          if ((*_prev)[target] != INVALID || (*_next)[target] != INVALID) {
            if ((*_next)[target] == INVALID) {
              _last[(*_bucket)[target]] = (*_prev)[target];
              new_target = (*_prev)[target];
              (*_prev)[(*_next)[target]] = (*_prev)[target];
              new_target = (*_next)[target];
            if ((*_prev)[target] == INVALID) {
              _first[(*_bucket)[target]] = (*_next)[target];
              (*_next)[(*_prev)[target]] = (*_next)[target];
            _sets.back().pop_back();
            if (_sets.back().empty()) {
              for (std::list<int>::iterator it = _sets.back().begin();
                   it != _sets.back().end(); ++it) {
                _dormant[*it] = false;
            new_target = _last[_sets.back().back()];
          (*_bucket)[target] = 0;
          (*_source_set)[target] = true;
          for (InArcIt a(_graph, target); a != INVALID; ++a) {
            Value rem = (*_capacity)[a] - (*_flow)[a];
            if (!_tolerance.positive(rem)) continue;
            Node v = _graph.source(a);
            if (!(*_active)[v] && !(*_source_set)[v]) {
            (*_excess)[v] += rem;
            (*_flow)[a] = (*_capacity)[a];
          for (OutArcIt a(_graph, target); a != INVALID; ++a) {
            Value rem = (*_flow)[a];
            if (!_tolerance.positive(rem)) continue;
            Node v = _graph.target(a);
            if (!(*_active)[v] && !(*_source_set)[v]) {
            (*_excess)[v] += rem;
          if ((*_active)[target]) {
          _highest = _sets.back().begin();
          while (_highest != _sets.back().end() &&
                 !(*_active)[_first[*_highest]]) {
    /// \name Execution Control
    /// The simplest way to execute the algorithm is to use
    /// one of the member functions called \ref run().
    ///
    /// If you need better control over the execution,
    /// you have to call one of the \ref init() functions first, then
    /// \ref calculateOut() and/or \ref calculateIn().
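    ///
    /// A sketch of this step-by-step usage (assuming \c ho is a HaoOrlin
    /// instance and \c s is the chosen source node) could be:
    /// \code
    ///   ho.init(s);
    ///   ho.calculateOut();
    ///   ho.calculateIn();
    ///   std::cout << ho.minCutValue() << std::endl;
    /// \endcode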
    /// \brief Initialize the internal data structures.
    ///
    /// This function initializes the internal data structures. It creates
    /// the maps and some bucket structures for the algorithm.
    /// The first node is used as the source node for the push-relabel
    /// algorithm.
    void init() {
      init(NodeIt(_graph));
    }
    /// \brief Initialize the internal data structures.
    ///
    /// This function initializes the internal data structures. It creates
    /// the maps and some bucket structures for the algorithm.
    /// The given node is used as the source node for the push-relabel
    /// algorithm.
    void init(const Node& source) {
      _source = source;
      _node_num = countNodes(_graph);
      _first.resize(_node_num);
      _last.resize(_node_num);
      _dormant.resize(_node_num);
      _flow = new FlowMap(_graph);
      _next = new typename Digraph::template NodeMap<Node>(_graph);
      _prev = new typename Digraph::template NodeMap<Node>(_graph);
      _active = new typename Digraph::template NodeMap<bool>(_graph);
      _bucket = new typename Digraph::template NodeMap<int>(_graph);
      _excess = new ExcessMap(_graph);
      _source_set = new SourceSetMap(_graph);
      _min_cut_map = new MinCutMap(_graph);
      _min_cut = std::numeric_limits<Value>::max();
    }
    /// \brief Calculate a minimum cut with \f$ source \f$ on the
    /// source-side.
    ///
    /// This function calculates a minimum cut with \f$ source \f$ on the
    /// source-side (i.e. a set \f$ X\subsetneq V \f$ with
    /// \f$ source \in X \f$ and minimal outgoing capacity).
    /// It updates the stored cut if (and only if) the newly found one
    /// is better.
    ///
    /// \pre \ref init() must be called before using this function.
    void calculateOut() {
      findMinCutOut();
    }
    /// \brief Calculate a minimum cut with \f$ source \f$ on the
    /// sink-side.
    ///
    /// This function calculates a minimum cut with \f$ source \f$ on the
    /// sink-side (i.e. a set \f$ X\subsetneq V \f$ with
    /// \f$ source \notin X \f$ and minimal outgoing capacity).
    /// It updates the stored cut if (and only if) the newly found one
    /// is better.
    /// \pre \ref init() must be called before using this function.
    void calculateIn() {
      findMinCutIn();
    }
    /// \brief Run the algorithm.
    ///
    /// This function runs the algorithm. It chooses a source node,
    /// then calls \ref init(), \ref calculateOut()
    /// and \ref calculateIn().
    /// \brief Run the algorithm.
    ///
    /// This function runs the algorithm. It calls \ref init(),
    /// \ref calculateOut() and \ref calculateIn() with the given
    /// source node.
    void run(const Node& s) {
    /// \name Query Functions
    /// The result of the %HaoOrlin algorithm
    /// can be obtained using these functions.\n
    /// \ref run(), \ref calculateOut() or \ref calculateIn()
    /// should be called before using them.
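    ///
    /// For example, a sketch of querying the result (assuming \c ho is a
    /// \c HaoOrlin<ListDigraph> that has already been run on the digraph
    /// \c g) could be:
    /// \code
    ///   ListDigraph::NodeMap<bool> cut(g);
    ///   int value = ho.minCutMap(cut);
    ///   // cut[n] is now true exactly for the nodes of the cut set X
    /// \endcode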
    /// \brief Return the value of the minimum cut.
    ///
    /// This function returns the value of the best cut found by the
    /// previously called \ref run(), \ref calculateOut() or \ref
    /// calculateIn().
    ///
    /// \pre \ref run(), \ref calculateOut() or \ref calculateIn()
    /// must be called before using this function.
    Value minCutValue() const {
      return _min_cut;
    }
    /// \brief Return a minimum cut.
    ///
    /// This function gives the best cut found by the
    /// previously called \ref run(), \ref calculateOut() or \ref
    /// calculateIn().
    ///
    /// It sets \c cutMap to the characteristic vector of the found
    /// minimum value cut - a non-empty set \f$ X\subsetneq V \f$
    /// of minimum outgoing capacity (i.e. \c cutMap will be \c true exactly
    /// for the nodes of \f$ X \f$).
    ///
    /// \param cutMap A \ref concepts::WriteMap "writable" node map with
    /// \c bool (or convertible) value type.
    ///
    /// \return The value of the minimum cut.
    ///
    /// \pre \ref run(), \ref calculateOut() or \ref calculateIn()
    /// must be called before using this function.
    template <typename CutMap>
    Value minCutMap(CutMap& cutMap) const {
      for (NodeIt it(_graph); it != INVALID; ++it) {
        cutMap.set(it, (*_min_cut_map)[it]);
      }
      return _min_cut;
    }
#endif //LEMON_HAO_ORLIN_H