1.1 --- a/lemon/hao_orlin.h Mon Jan 12 23:11:39 2009 +0100
1.2 +++ b/lemon/hao_orlin.h Thu Nov 05 15:48:01 2009 +0100
1.3 @@ -31,57 +31,64 @@
1.4 /// \ingroup min_cut
1.5 /// \brief Implementation of the Hao-Orlin algorithm.
1.6 ///
1.7 -/// Implementation of the Hao-Orlin algorithm class for testing network
1.8 -/// reliability.
1.9 +/// Implementation of the Hao-Orlin algorithm for finding a minimum cut
1.10 +/// in a digraph.
1.11
1.12 namespace lemon {
1.13
1.14 /// \ingroup min_cut
1.15 ///
1.16 - /// \brief %Hao-Orlin algorithm to find a minimum cut in directed graphs.
1.17 + /// \brief Hao-Orlin algorithm for finding a minimum cut in a digraph.
1.18 ///
1.19 - /// Hao-Orlin calculates a minimum cut in a directed graph
1.20 - /// \f$D=(V,A)\f$. It takes a fixed node \f$ source \in V \f$ and
1.21 + /// This class implements the Hao-Orlin algorithm for finding a minimum
1.22 + /// value cut in a directed graph \f$D=(V,A)\f$.
1.23 + /// It takes a fixed node \f$ source \in V \f$ and
1.24 /// consists of two phases: in the first phase it determines a
1.25 /// minimum cut with \f$ source \f$ on the source-side (i.e. a set
1.26 - /// \f$ X\subsetneq V \f$ with \f$ source \in X \f$ and minimal
1.27 - /// out-degree) and in the second phase it determines a minimum cut
1.28 + /// \f$ X\subsetneq V \f$ with \f$ source \in X \f$ and minimal outgoing
1.29 + /// capacity) and in the second phase it determines a minimum cut
1.30 /// with \f$ source \f$ on the sink-side (i.e. a set
1.31 - /// \f$ X\subsetneq V \f$ with \f$ source \notin X \f$ and minimal
1.32 - /// out-degree). Obviously, the smaller of these two cuts will be a
1.33 + /// \f$ X\subsetneq V \f$ with \f$ source \notin X \f$ and minimal outgoing
1.34 + /// capacity). Obviously, the smaller of these two cuts will be a
1.35 /// minimum cut of \f$ D \f$. The algorithm is a modified
1.36 - /// push-relabel preflow algorithm and our implementation calculates
1.37 + /// preflow push-relabel algorithm. Our implementation calculates
1.38 /// the minimum cut in \f$ O(n^2\sqrt{m}) \f$ time (we use the
1.39 /// highest-label rule), or in \f$O(nm)\f$ for unit capacities. The
1.40 - /// purpose of such algorithm is testing network reliability. For an
1.41 - /// undirected graph you can run just the first phase of the
1.42 - /// algorithm or you can use the algorithm of Nagamochi and Ibaraki
1.43 - /// which solves the undirected problem in
1.44 - /// \f$ O(nm + n^2 \log(n)) \f$ time: it is implemented in the
1.45 - /// NagamochiIbaraki algorithm class.
1.46 + /// purpose of such algorithm is e.g. testing network reliability.
1.47 ///
1.48 - /// \param _Digraph is the graph type of the algorithm.
1.49 - /// \param _CapacityMap is an edge map of capacities which should
1.50 - /// be any numreric type. The default type is _Digraph::ArcMap<int>.
1.51 - /// \param _Tolerance is the handler of the inexact computation. The
1.52 - /// default type for this is Tolerance<CapacityMap::Value>.
1.53 + /// For an undirected graph you can run just the first phase of the
1.54 + /// algorithm or you can use the algorithm of Nagamochi and Ibaraki,
1.55 + /// which solves the undirected problem in \f$ O(nm + n^2 \log n) \f$
1.56 + /// time. It is implemented in the NagamochiIbaraki algorithm class.
1.57 + ///
1.58 + /// \tparam GR The type of the digraph the algorithm runs on.
1.59 + /// \tparam CAP The type of the arc map containing the capacities,
1.60 + /// which can be any numeric type. The default map type is
1.61 + /// \ref concepts::Digraph::ArcMap "GR::ArcMap<int>".
1.62 + /// \tparam TOL Tolerance class for handling inexact computations. The
1.63 + /// default tolerance type is \ref Tolerance "Tolerance<CAP::Value>".
1.64 #ifdef DOXYGEN
1.65 - template <typename _Digraph, typename _CapacityMap, typename _Tolerance>
1.66 + template <typename GR, typename CAP, typename TOL>
1.67 #else
1.68 - template <typename _Digraph,
1.69 - typename _CapacityMap = typename _Digraph::template ArcMap<int>,
1.70 - typename _Tolerance = Tolerance<typename _CapacityMap::Value> >
1.71 + template <typename GR,
1.72 + typename CAP = typename GR::template ArcMap<int>,
1.73 + typename TOL = Tolerance<typename CAP::Value> >
1.74 #endif
1.75 class HaoOrlin {
1.76 + public:
1.77 +
1.78 + /// The digraph type of the algorithm
1.79 + typedef GR Digraph;
1.80 + /// The capacity map type of the algorithm
1.81 + typedef CAP CapacityMap;
1.82 + /// The tolerance type of the algorithm
1.83 + typedef TOL Tolerance;
1.84 +
1.85 private:
1.86
1.87 - typedef _Digraph Digraph;
1.88 - typedef _CapacityMap CapacityMap;
1.89 - typedef _Tolerance Tolerance;
1.90 -
1.91 typedef typename CapacityMap::Value Value;
1.92
1.93 - TEMPLATE_GRAPH_TYPEDEFS(Digraph);
1.94 + TEMPLATE_DIGRAPH_TYPEDEFS(Digraph);
1.95
1.96 const Digraph& _graph;
1.97 const CapacityMap* _capacity;
1.98 @@ -161,56 +168,56 @@
1.99 private:
1.100
1.101 void activate(const Node& i) {
1.102 - _active->set(i, true);
1.103 + (*_active)[i] = true;
1.104
1.105 int bucket = (*_bucket)[i];
1.106
1.107 if ((*_prev)[i] == INVALID || (*_active)[(*_prev)[i]]) return;
1.108 //unlace
1.109 - _next->set((*_prev)[i], (*_next)[i]);
1.110 + (*_next)[(*_prev)[i]] = (*_next)[i];
1.111 if ((*_next)[i] != INVALID) {
1.112 - _prev->set((*_next)[i], (*_prev)[i]);
1.113 + (*_prev)[(*_next)[i]] = (*_prev)[i];
1.114 } else {
1.115 _last[bucket] = (*_prev)[i];
1.116 }
1.117 //lace
1.118 - _next->set(i, _first[bucket]);
1.119 - _prev->set(_first[bucket], i);
1.120 - _prev->set(i, INVALID);
1.121 + (*_next)[i] = _first[bucket];
1.122 + (*_prev)[_first[bucket]] = i;
1.123 + (*_prev)[i] = INVALID;
1.124 _first[bucket] = i;
1.125 }
1.126
1.127 void deactivate(const Node& i) {
1.128 - _active->set(i, false);
1.129 + (*_active)[i] = false;
1.130 int bucket = (*_bucket)[i];
1.131
1.132 if ((*_next)[i] == INVALID || !(*_active)[(*_next)[i]]) return;
1.133
1.134 //unlace
1.135 - _prev->set((*_next)[i], (*_prev)[i]);
1.136 + (*_prev)[(*_next)[i]] = (*_prev)[i];
1.137 if ((*_prev)[i] != INVALID) {
1.138 - _next->set((*_prev)[i], (*_next)[i]);
1.139 + (*_next)[(*_prev)[i]] = (*_next)[i];
1.140 } else {
1.141 _first[bucket] = (*_next)[i];
1.142 }
1.143 //lace
1.144 - _prev->set(i, _last[bucket]);
1.145 - _next->set(_last[bucket], i);
1.146 - _next->set(i, INVALID);
1.147 + (*_prev)[i] = _last[bucket];
1.148 + (*_next)[_last[bucket]] = i;
1.149 + (*_next)[i] = INVALID;
1.150 _last[bucket] = i;
1.151 }
1.152
1.153 void addItem(const Node& i, int bucket) {
1.154 (*_bucket)[i] = bucket;
1.155 if (_last[bucket] != INVALID) {
1.156 - _prev->set(i, _last[bucket]);
1.157 - _next->set(_last[bucket], i);
1.158 - _next->set(i, INVALID);
1.159 + (*_prev)[i] = _last[bucket];
1.160 + (*_next)[_last[bucket]] = i;
1.161 + (*_next)[i] = INVALID;
1.162 _last[bucket] = i;
1.163 } else {
1.164 - _prev->set(i, INVALID);
1.165 + (*_prev)[i] = INVALID;
1.166 _first[bucket] = i;
1.167 - _next->set(i, INVALID);
1.168 + (*_next)[i] = INVALID;
1.169 _last[bucket] = i;
1.170 }
1.171 }
1.172 @@ -218,11 +225,12 @@
1.173 void findMinCutOut() {
1.174
1.175 for (NodeIt n(_graph); n != INVALID; ++n) {
1.176 - _excess->set(n, 0);
1.177 + (*_excess)[n] = 0;
1.178 + (*_source_set)[n] = false;
1.179 }
1.180
1.181 for (ArcIt a(_graph); a != INVALID; ++a) {
1.182 - _flow->set(a, 0);
1.183 + (*_flow)[a] = 0;
1.184 }
1.185
1.186 int bucket_num = 0;
1.187 @@ -232,7 +240,7 @@
1.188 {
1.189 typename Digraph::template NodeMap<bool> reached(_graph, false);
1.190
1.191 - reached.set(_source, true);
1.192 + reached[_source] = true;
1.193 bool first_set = true;
1.194
1.195 for (NodeIt t(_graph); t != INVALID; ++t) {
1.196 @@ -240,7 +248,7 @@
1.197 _sets.push_front(std::list<int>());
1.198
1.199 queue[qlast++] = t;
1.200 - reached.set(t, true);
1.201 + reached[t] = true;
1.202
1.203 while (qfirst != qlast) {
1.204 if (qsep == qfirst) {
1.205 @@ -257,7 +265,7 @@
1.206 for (InArcIt a(_graph, n); a != INVALID; ++a) {
1.207 Node u = _graph.source(a);
1.208 if (!reached[u] && _tolerance.positive((*_capacity)[a])) {
1.209 - reached.set(u, true);
1.210 + reached[u] = true;
1.211 queue[qlast++] = u;
1.212 }
1.213 }
1.214 @@ -266,18 +274,18 @@
1.215 }
1.216
1.217 ++bucket_num;
1.218 - _bucket->set(_source, 0);
1.219 + (*_bucket)[_source] = 0;
1.220 _dormant[0] = true;
1.221 }
1.222 - _source_set->set(_source, true);
1.223 + (*_source_set)[_source] = true;
1.224
1.225 Node target = _last[_sets.back().back()];
1.226 {
1.227 for (OutArcIt a(_graph, _source); a != INVALID; ++a) {
1.228 if (_tolerance.positive((*_capacity)[a])) {
1.229 Node u = _graph.target(a);
1.230 - _flow->set(a, (*_capacity)[a]);
1.231 - _excess->set(u, (*_excess)[u] + (*_capacity)[a]);
1.232 + (*_flow)[a] = (*_capacity)[a];
1.233 + (*_excess)[u] += (*_capacity)[a];
1.234 if (!(*_active)[u] && u != _source) {
1.235 activate(u);
1.236 }
1.237 @@ -318,14 +326,14 @@
1.238 activate(v);
1.239 }
1.240 if (!_tolerance.less(rem, excess)) {
1.241 - _flow->set(a, (*_flow)[a] + excess);
1.242 - _excess->set(v, (*_excess)[v] + excess);
1.243 + (*_flow)[a] += excess;
1.244 + (*_excess)[v] += excess;
1.245 excess = 0;
1.246 goto no_more_push;
1.247 } else {
1.248 excess -= rem;
1.249 - _excess->set(v, (*_excess)[v] + rem);
1.250 - _flow->set(a, (*_capacity)[a]);
1.251 + (*_excess)[v] += rem;
1.252 + (*_flow)[a] = (*_capacity)[a];
1.253 }
1.254 } else if (next_bucket > (*_bucket)[v]) {
1.255 next_bucket = (*_bucket)[v];
1.256 @@ -342,14 +350,14 @@
1.257 activate(v);
1.258 }
1.259 if (!_tolerance.less(rem, excess)) {
1.260 - _flow->set(a, (*_flow)[a] - excess);
1.261 - _excess->set(v, (*_excess)[v] + excess);
1.262 + (*_flow)[a] -= excess;
1.263 + (*_excess)[v] += excess;
1.264 excess = 0;
1.265 goto no_more_push;
1.266 } else {
1.267 excess -= rem;
1.268 - _excess->set(v, (*_excess)[v] + rem);
1.269 - _flow->set(a, 0);
1.270 + (*_excess)[v] += rem;
1.271 + (*_flow)[a] = 0;
1.272 }
1.273 } else if (next_bucket > (*_bucket)[v]) {
1.274 next_bucket = (*_bucket)[v];
1.275 @@ -358,7 +366,7 @@
1.276
1.277 no_more_push:
1.278
1.279 - _excess->set(n, excess);
1.280 + (*_excess)[n] = excess;
1.281
1.282 if (excess != 0) {
1.283 if ((*_next)[n] == INVALID) {
1.284 @@ -376,16 +384,16 @@
1.285 }
1.286 } else if (next_bucket == _node_num) {
1.287 _first[(*_bucket)[n]] = (*_next)[n];
1.288 - _prev->set((*_next)[n], INVALID);
1.289 + (*_prev)[(*_next)[n]] = INVALID;
1.290
1.291 std::list<std::list<int> >::iterator new_set =
1.292 _sets.insert(--_sets.end(), std::list<int>());
1.293
1.294 new_set->push_front(bucket_num);
1.295 - _bucket->set(n, bucket_num);
1.296 + (*_bucket)[n] = bucket_num;
1.297 _first[bucket_num] = _last[bucket_num] = n;
1.298 - _next->set(n, INVALID);
1.299 - _prev->set(n, INVALID);
1.300 + (*_next)[n] = INVALID;
1.301 + (*_prev)[n] = INVALID;
1.302 _dormant[bucket_num] = true;
1.303 ++bucket_num;
1.304
1.305 @@ -395,7 +403,7 @@
1.306 }
1.307 } else {
1.308 _first[*_highest] = (*_next)[n];
1.309 - _prev->set((*_next)[n], INVALID);
1.310 + (*_prev)[(*_next)[n]] = INVALID;
1.311
1.312 while (next_bucket != *_highest) {
1.313 --_highest;
1.314 @@ -409,10 +417,10 @@
1.315 }
1.316 --_highest;
1.317
1.318 - _bucket->set(n, *_highest);
1.319 - _next->set(n, _first[*_highest]);
1.320 + (*_bucket)[n] = *_highest;
1.321 + (*_next)[n] = _first[*_highest];
1.322 if (_first[*_highest] != INVALID) {
1.323 - _prev->set(_first[*_highest], n);
1.324 + (*_prev)[_first[*_highest]] = n;
1.325 } else {
1.326 _last[*_highest] = n;
1.327 }
1.328 @@ -434,13 +442,13 @@
1.329 if ((*_excess)[target] < _min_cut) {
1.330 _min_cut = (*_excess)[target];
1.331 for (NodeIt i(_graph); i != INVALID; ++i) {
1.332 - _min_cut_map->set(i, true);
1.333 + (*_min_cut_map)[i] = true;
1.334 }
1.335 for (std::list<int>::iterator it = _sets.back().begin();
1.336 it != _sets.back().end(); ++it) {
1.337 Node n = _first[*it];
1.338 while (n != INVALID) {
1.339 - _min_cut_map->set(n, false);
1.340 + (*_min_cut_map)[n] = false;
1.341 n = (*_next)[n];
1.342 }
1.343 }
1.344 @@ -453,13 +461,13 @@
1.345 _last[(*_bucket)[target]] = (*_prev)[target];
1.346 new_target = (*_prev)[target];
1.347 } else {
1.348 - _prev->set((*_next)[target], (*_prev)[target]);
1.349 + (*_prev)[(*_next)[target]] = (*_prev)[target];
1.350 new_target = (*_next)[target];
1.351 }
1.352 if ((*_prev)[target] == INVALID) {
1.353 _first[(*_bucket)[target]] = (*_next)[target];
1.354 } else {
1.355 - _next->set((*_prev)[target], (*_next)[target]);
1.356 + (*_next)[(*_prev)[target]] = (*_next)[target];
1.357 }
1.358 } else {
1.359 _sets.back().pop_back();
1.360 @@ -475,9 +483,9 @@
1.361 new_target = _last[_sets.back().back()];
1.362 }
1.363
1.364 - _bucket->set(target, 0);
1.365 + (*_bucket)[target] = 0;
1.366
1.367 - _source_set->set(target, true);
1.368 + (*_source_set)[target] = true;
1.369 for (OutArcIt a(_graph, target); a != INVALID; ++a) {
1.370 Value rem = (*_capacity)[a] - (*_flow)[a];
1.371 if (!_tolerance.positive(rem)) continue;
1.372 @@ -485,8 +493,8 @@
1.373 if (!(*_active)[v] && !(*_source_set)[v]) {
1.374 activate(v);
1.375 }
1.376 - _excess->set(v, (*_excess)[v] + rem);
1.377 - _flow->set(a, (*_capacity)[a]);
1.378 + (*_excess)[v] += rem;
1.379 + (*_flow)[a] = (*_capacity)[a];
1.380 }
1.381
1.382 for (InArcIt a(_graph, target); a != INVALID; ++a) {
1.383 @@ -496,8 +504,8 @@
1.384 if (!(*_active)[v] && !(*_source_set)[v]) {
1.385 activate(v);
1.386 }
1.387 - _excess->set(v, (*_excess)[v] + rem);
1.388 - _flow->set(a, 0);
1.389 + (*_excess)[v] += rem;
1.390 + (*_flow)[a] = 0;
1.391 }
1.392
1.393 target = new_target;
1.394 @@ -517,11 +525,12 @@
1.395 void findMinCutIn() {
1.396
1.397 for (NodeIt n(_graph); n != INVALID; ++n) {
1.398 - _excess->set(n, 0);
1.399 + (*_excess)[n] = 0;
1.400 + (*_source_set)[n] = false;
1.401 }
1.402
1.403 for (ArcIt a(_graph); a != INVALID; ++a) {
1.404 - _flow->set(a, 0);
1.405 + (*_flow)[a] = 0;
1.406 }
1.407
1.408 int bucket_num = 0;
1.409 @@ -531,7 +540,7 @@
1.410 {
1.411 typename Digraph::template NodeMap<bool> reached(_graph, false);
1.412
1.413 - reached.set(_source, true);
1.414 + reached[_source] = true;
1.415
1.416 bool first_set = true;
1.417
1.418 @@ -540,7 +549,7 @@
1.419 _sets.push_front(std::list<int>());
1.420
1.421 queue[qlast++] = t;
1.422 - reached.set(t, true);
1.423 + reached[t] = true;
1.424
1.425 while (qfirst != qlast) {
1.426 if (qsep == qfirst) {
1.427 @@ -557,7 +566,7 @@
1.428 for (OutArcIt a(_graph, n); a != INVALID; ++a) {
1.429 Node u = _graph.target(a);
1.430 if (!reached[u] && _tolerance.positive((*_capacity)[a])) {
1.431 - reached.set(u, true);
1.432 + reached[u] = true;
1.433 queue[qlast++] = u;
1.434 }
1.435 }
1.436 @@ -566,18 +575,18 @@
1.437 }
1.438
1.439 ++bucket_num;
1.440 - _bucket->set(_source, 0);
1.441 + (*_bucket)[_source] = 0;
1.442 _dormant[0] = true;
1.443 }
1.444 - _source_set->set(_source, true);
1.445 + (*_source_set)[_source] = true;
1.446
1.447 Node target = _last[_sets.back().back()];
1.448 {
1.449 for (InArcIt a(_graph, _source); a != INVALID; ++a) {
1.450 if (_tolerance.positive((*_capacity)[a])) {
1.451 Node u = _graph.source(a);
1.452 - _flow->set(a, (*_capacity)[a]);
1.453 - _excess->set(u, (*_excess)[u] + (*_capacity)[a]);
1.454 + (*_flow)[a] = (*_capacity)[a];
1.455 + (*_excess)[u] += (*_capacity)[a];
1.456 if (!(*_active)[u] && u != _source) {
1.457 activate(u);
1.458 }
1.459 @@ -618,14 +627,14 @@
1.460 activate(v);
1.461 }
1.462 if (!_tolerance.less(rem, excess)) {
1.463 - _flow->set(a, (*_flow)[a] + excess);
1.464 - _excess->set(v, (*_excess)[v] + excess);
1.465 + (*_flow)[a] += excess;
1.466 + (*_excess)[v] += excess;
1.467 excess = 0;
1.468 goto no_more_push;
1.469 } else {
1.470 excess -= rem;
1.471 - _excess->set(v, (*_excess)[v] + rem);
1.472 - _flow->set(a, (*_capacity)[a]);
1.473 + (*_excess)[v] += rem;
1.474 + (*_flow)[a] = (*_capacity)[a];
1.475 }
1.476 } else if (next_bucket > (*_bucket)[v]) {
1.477 next_bucket = (*_bucket)[v];
1.478 @@ -642,14 +651,14 @@
1.479 activate(v);
1.480 }
1.481 if (!_tolerance.less(rem, excess)) {
1.482 - _flow->set(a, (*_flow)[a] - excess);
1.483 - _excess->set(v, (*_excess)[v] + excess);
1.484 + (*_flow)[a] -= excess;
1.485 + (*_excess)[v] += excess;
1.486 excess = 0;
1.487 goto no_more_push;
1.488 } else {
1.489 excess -= rem;
1.490 - _excess->set(v, (*_excess)[v] + rem);
1.491 - _flow->set(a, 0);
1.492 + (*_excess)[v] += rem;
1.493 + (*_flow)[a] = 0;
1.494 }
1.495 } else if (next_bucket > (*_bucket)[v]) {
1.496 next_bucket = (*_bucket)[v];
1.497 @@ -658,7 +667,7 @@
1.498
1.499 no_more_push:
1.500
1.501 - _excess->set(n, excess);
1.502 + (*_excess)[n] = excess;
1.503
1.504 if (excess != 0) {
1.505 if ((*_next)[n] == INVALID) {
1.506 @@ -676,16 +685,16 @@
1.507 }
1.508 } else if (next_bucket == _node_num) {
1.509 _first[(*_bucket)[n]] = (*_next)[n];
1.510 - _prev->set((*_next)[n], INVALID);
1.511 + (*_prev)[(*_next)[n]] = INVALID;
1.512
1.513 std::list<std::list<int> >::iterator new_set =
1.514 _sets.insert(--_sets.end(), std::list<int>());
1.515
1.516 new_set->push_front(bucket_num);
1.517 - _bucket->set(n, bucket_num);
1.518 + (*_bucket)[n] = bucket_num;
1.519 _first[bucket_num] = _last[bucket_num] = n;
1.520 - _next->set(n, INVALID);
1.521 - _prev->set(n, INVALID);
1.522 + (*_next)[n] = INVALID;
1.523 + (*_prev)[n] = INVALID;
1.524 _dormant[bucket_num] = true;
1.525 ++bucket_num;
1.526
1.527 @@ -695,7 +704,7 @@
1.528 }
1.529 } else {
1.530 _first[*_highest] = (*_next)[n];
1.531 - _prev->set((*_next)[n], INVALID);
1.532 + (*_prev)[(*_next)[n]] = INVALID;
1.533
1.534 while (next_bucket != *_highest) {
1.535 --_highest;
1.536 @@ -708,10 +717,10 @@
1.537 }
1.538 --_highest;
1.539
1.540 - _bucket->set(n, *_highest);
1.541 - _next->set(n, _first[*_highest]);
1.542 + (*_bucket)[n] = *_highest;
1.543 + (*_next)[n] = _first[*_highest];
1.544 if (_first[*_highest] != INVALID) {
1.545 - _prev->set(_first[*_highest], n);
1.546 + (*_prev)[_first[*_highest]] = n;
1.547 } else {
1.548 _last[*_highest] = n;
1.549 }
1.550 @@ -733,13 +742,13 @@
1.551 if ((*_excess)[target] < _min_cut) {
1.552 _min_cut = (*_excess)[target];
1.553 for (NodeIt i(_graph); i != INVALID; ++i) {
1.554 - _min_cut_map->set(i, false);
1.555 + (*_min_cut_map)[i] = false;
1.556 }
1.557 for (std::list<int>::iterator it = _sets.back().begin();
1.558 it != _sets.back().end(); ++it) {
1.559 Node n = _first[*it];
1.560 while (n != INVALID) {
1.561 - _min_cut_map->set(n, true);
1.562 + (*_min_cut_map)[n] = true;
1.563 n = (*_next)[n];
1.564 }
1.565 }
1.566 @@ -752,13 +761,13 @@
1.567 _last[(*_bucket)[target]] = (*_prev)[target];
1.568 new_target = (*_prev)[target];
1.569 } else {
1.570 - _prev->set((*_next)[target], (*_prev)[target]);
1.571 + (*_prev)[(*_next)[target]] = (*_prev)[target];
1.572 new_target = (*_next)[target];
1.573 }
1.574 if ((*_prev)[target] == INVALID) {
1.575 _first[(*_bucket)[target]] = (*_next)[target];
1.576 } else {
1.577 - _next->set((*_prev)[target], (*_next)[target]);
1.578 + (*_next)[(*_prev)[target]] = (*_next)[target];
1.579 }
1.580 } else {
1.581 _sets.back().pop_back();
1.582 @@ -774,9 +783,9 @@
1.583 new_target = _last[_sets.back().back()];
1.584 }
1.585
1.586 - _bucket->set(target, 0);
1.587 + (*_bucket)[target] = 0;
1.588
1.589 - _source_set->set(target, true);
1.590 + (*_source_set)[target] = true;
1.591 for (InArcIt a(_graph, target); a != INVALID; ++a) {
1.592 Value rem = (*_capacity)[a] - (*_flow)[a];
1.593 if (!_tolerance.positive(rem)) continue;
1.594 @@ -784,8 +793,8 @@
1.595 if (!(*_active)[v] && !(*_source_set)[v]) {
1.596 activate(v);
1.597 }
1.598 - _excess->set(v, (*_excess)[v] + rem);
1.599 - _flow->set(a, (*_capacity)[a]);
1.600 + (*_excess)[v] += rem;
1.601 + (*_flow)[a] = (*_capacity)[a];
1.602 }
1.603
1.604 for (OutArcIt a(_graph, target); a != INVALID; ++a) {
1.605 @@ -795,8 +804,8 @@
1.606 if (!(*_active)[v] && !(*_source_set)[v]) {
1.607 activate(v);
1.608 }
1.609 - _excess->set(v, (*_excess)[v] + rem);
1.610 - _flow->set(a, 0);
1.611 + (*_excess)[v] += rem;
1.612 + (*_flow)[a] = 0;
1.613 }
1.614
1.615 target = new_target;
1.616 @@ -815,31 +824,32 @@
1.617
1.618 public:
1.619
1.620 - /// \name Execution control
1.621 + /// \name Execution Control
1.622 /// The simplest way to execute the algorithm is to use
1.623 - /// one of the member functions called \c run(...).
1.624 + /// one of the member functions called \ref run().
1.625 /// \n
1.626 - /// If you need more control on the execution,
1.627 - /// first you must call \ref init(), then the \ref calculateIn() or
1.628 - /// \ref calculateOut() functions.
1.629 + /// If you need better control on the execution,
1.630 + /// you have to call one of the \ref init() functions first, then
1.631 + /// \ref calculateOut() and/or \ref calculateIn().
1.632
1.633 /// @{
1.634
1.635 - /// \brief Initializes the internal data structures.
1.636 + /// \brief Initialize the internal data structures.
1.637 ///
1.638 - /// Initializes the internal data structures. It creates
1.639 - /// the maps, residual graph adaptors and some bucket structures
1.640 - /// for the algorithm.
1.641 + /// This function initializes the internal data structures. It creates
1.642 + /// the maps and some bucket structures for the algorithm.
1.643 + /// The first node is used as the source node for the push-relabel
1.644 + /// algorithm.
1.645 void init() {
1.646 init(NodeIt(_graph));
1.647 }
1.648
1.649 - /// \brief Initializes the internal data structures.
1.650 + /// \brief Initialize the internal data structures.
1.651 ///
1.652 - /// Initializes the internal data structures. It creates
1.653 - /// the maps, residual graph adaptor and some bucket structures
1.654 - /// for the algorithm. Node \c source is used as the push-relabel
1.655 - /// algorithm's source.
1.656 + /// This function initializes the internal data structures. It creates
1.657 + /// the maps and some bucket structures for the algorithm.
1.658 + /// The given node is used as the source node for the push-relabel
1.659 + /// algorithm.
1.660 void init(const Node& source) {
1.661 _source = source;
1.662
1.663 @@ -879,31 +889,35 @@
1.664 }
1.665
1.666
1.667 - /// \brief Calculates a minimum cut with \f$ source \f$ on the
1.668 + /// \brief Calculate a minimum cut with \f$ source \f$ on the
1.669 /// source-side.
1.670 ///
1.671 - /// Calculates a minimum cut with \f$ source \f$ on the
1.672 + /// This function calculates a minimum cut with \f$ source \f$ on the
1.673 /// source-side (i.e. a set \f$ X\subsetneq V \f$ with
1.674 - /// \f$ source \in X \f$ and minimal out-degree).
1.675 + /// \f$ source \in X \f$ and minimal outgoing capacity).
1.676 + ///
1.677 + /// \pre \ref init() must be called before using this function.
1.678 void calculateOut() {
1.679 findMinCutOut();
1.680 }
1.681
1.682 - /// \brief Calculates a minimum cut with \f$ source \f$ on the
1.683 - /// target-side.
1.684 + /// \brief Calculate a minimum cut with \f$ source \f$ on the
1.685 + /// sink-side.
1.686 ///
1.687 - /// Calculates a minimum cut with \f$ source \f$ on the
1.688 - /// target-side (i.e. a set \f$ X\subsetneq V \f$ with
1.689 - /// \f$ source \in X \f$ and minimal out-degree).
1.690 + /// This function calculates a minimum cut with \f$ source \f$ on the
1.691 + /// sink-side (i.e. a set \f$ X\subsetneq V \f$ with
1.692 + /// \f$ source \notin X \f$ and minimal outgoing capacity).
1.693 + ///
1.694 + /// \pre \ref init() must be called before using this function.
1.695 void calculateIn() {
1.696 findMinCutIn();
1.697 }
1.698
1.699
1.700 - /// \brief Runs the algorithm.
1.701 + /// \brief Run the algorithm.
1.702 ///
1.703 - /// Runs the algorithm. It finds nodes \c source and \c target
1.704 - /// arbitrarily and then calls \ref init(), \ref calculateOut()
1.705 + /// This function runs the algorithm. It finds nodes \c source and
1.706 + /// \c target arbitrarily and then calls \ref init(), \ref calculateOut()
1.707 /// and \ref calculateIn().
1.708 void run() {
1.709 init();
1.710 @@ -911,11 +925,11 @@
1.711 calculateIn();
1.712 }
1.713
1.714 - /// \brief Runs the algorithm.
1.715 + /// \brief Run the algorithm.
1.716 ///
1.717 - /// Runs the algorithm. It uses the given \c source node, finds a
1.718 - /// proper \c target and then calls the \ref init(), \ref
1.719 - /// calculateOut() and \ref calculateIn().
1.720 + /// This function runs the algorithm. It uses the given \c source node,
1.721 + /// finds a proper \c target node and then calls the \ref init(),
1.722 + /// \ref calculateOut() and \ref calculateIn().
1.723 void run(const Node& s) {
1.724 init(s);
1.725 calculateOut();
1.726 @@ -926,32 +940,41 @@
1.727
1.728 /// \name Query Functions
1.729 /// The result of the %HaoOrlin algorithm
1.730 - /// can be obtained using these functions.
1.731 - /// \n
1.732 - /// Before using these functions, either \ref run(), \ref
1.733 - /// calculateOut() or \ref calculateIn() must be called.
1.734 + /// can be obtained using these functions.\n
1.735 + /// \ref run(), \ref calculateOut() or \ref calculateIn()
1.736 + /// should be called before using them.
1.737
1.738 /// @{
1.739
1.740 - /// \brief Returns the value of the minimum value cut.
1.741 + /// \brief Return the value of the minimum cut.
1.742 ///
1.743 - /// Returns the value of the minimum value cut.
1.744 + /// This function returns the value of the minimum cut.
1.745 + ///
1.746 + /// \pre \ref run(), \ref calculateOut() or \ref calculateIn()
1.747 + /// must be called before using this function.
1.748 Value minCutValue() const {
1.749 return _min_cut;
1.750 }
1.751
1.752
1.753 - /// \brief Returns a minimum cut.
1.754 + /// \brief Return a minimum cut.
1.755 ///
1.756 - /// Sets \c nodeMap to the characteristic vector of a minimum
1.757 - /// value cut: it will give a nonempty set \f$ X\subsetneq V \f$
1.758 - /// with minimal out-degree (i.e. \c nodeMap will be true exactly
1.759 - /// for the nodes of \f$ X \f$). \pre nodeMap should be a
1.760 - /// bool-valued node-map.
1.761 - template <typename NodeMap>
1.762 - Value minCutMap(NodeMap& nodeMap) const {
1.763 + /// This function sets \c cutMap to the characteristic vector of a
1.764 + /// minimum value cut: it will give a non-empty set \f$ X\subsetneq V \f$
1.765 + /// with minimal outgoing capacity (i.e. \c cutMap will be \c true exactly
1.766 + /// for the nodes of \f$ X \f$).
1.767 + ///
1.768 + /// \param cutMap A \ref concepts::WriteMap "writable" node map with
1.769 + /// \c bool (or convertible) value type.
1.770 + ///
1.771 + /// \return The value of the minimum cut.
1.772 + ///
1.773 + /// \pre \ref run(), \ref calculateOut() or \ref calculateIn()
1.774 + /// must be called before using this function.
1.775 + template <typename CutMap>
1.776 + Value minCutMap(CutMap& cutMap) const {
1.777 for (NodeIt it(_graph); it != INVALID; ++it) {
1.778 - nodeMap.set(it, (*_min_cut_map)[it]);
1.779 + cutMap.set(it, (*_min_cut_map)[it]);
1.780 }
1.781 return _min_cut;
1.782 }
1.783 @@ -960,7 +983,6 @@
1.784
1.785 }; //class HaoOrlin
1.786
1.787 -
1.788 } //namespace lemon
1.789
1.790 #endif //LEMON_HAO_ORLIN_H