1.1 --- a/lemon/hao_orlin.h Sat Apr 18 21:54:30 2009 +0200
1.2 +++ b/lemon/hao_orlin.h Tue Apr 21 10:34:49 2009 +0100
1.3 @@ -31,39 +31,41 @@
1.4 /// \ingroup min_cut
1.5 /// \brief Implementation of the Hao-Orlin algorithm.
1.6 ///
1.7 -/// Implementation of the Hao-Orlin algorithm class for testing network
1.8 -/// reliability.
1.9 +/// Implementation of the Hao-Orlin algorithm for finding a minimum cut
1.10 +/// in a digraph.
1.11
1.12 namespace lemon {
1.13
1.14 /// \ingroup min_cut
1.15 ///
1.16 - /// \brief %Hao-Orlin algorithm to find a minimum cut in directed graphs.
1.17 + /// \brief Hao-Orlin algorithm for finding a minimum cut in a digraph.
1.18 ///
1.19 - /// Hao-Orlin calculates a minimum cut in a directed graph
1.20 - /// \f$D=(V,A)\f$. It takes a fixed node \f$ source \in V \f$ and
1.21 + /// This class implements the Hao-Orlin algorithm for finding a minimum
1.22 + /// value cut in a directed graph \f$D=(V,A)\f$.
1.23 + /// It takes a fixed node \f$ source \in V \f$ and
1.24 /// consists of two phases: in the first phase it determines a
1.25 /// minimum cut with \f$ source \f$ on the source-side (i.e. a set
1.26 - /// \f$ X\subsetneq V \f$ with \f$ source \in X \f$ and minimal
1.27 - /// out-degree) and in the second phase it determines a minimum cut
1.28 + /// \f$ X\subsetneq V \f$ with \f$ source \in X \f$ and minimal outgoing
1.29 + /// capacity) and in the second phase it determines a minimum cut
1.30 /// with \f$ source \f$ on the sink-side (i.e. a set
1.31 - /// \f$ X\subsetneq V \f$ with \f$ source \notin X \f$ and minimal
1.32 - /// out-degree). Obviously, the smaller of these two cuts will be a
1.33 + /// \f$ X\subsetneq V \f$ with \f$ source \notin X \f$ and minimal outgoing
1.34 + /// capacity). Obviously, the smaller of these two cuts will be a
1.35 /// minimum cut of \f$ D \f$. The algorithm is a modified
1.36 - /// push-relabel preflow algorithm and our implementation calculates
1.37 + /// preflow push-relabel algorithm. Our implementation calculates
1.38 /// the minimum cut in \f$ O(n^2\sqrt{m}) \f$ time (we use the
1.39 /// highest-label rule), or in \f$O(nm)\f$ for unit capacities. The
1.40 - /// purpose of such algorithm is testing network reliability. For an
1.41 - /// undirected graph you can run just the first phase of the
1.42 - /// algorithm or you can use the algorithm of Nagamochi and Ibaraki
1.43 - /// which solves the undirected problem in
1.44 - /// \f$ O(nm + n^2 \log n) \f$ time: it is implemented in the
1.45 - /// NagamochiIbaraki algorithm class.
1.46 + /// purpose of such an algorithm is e.g. testing network reliability.
1.47 ///
1.48 - /// \param GR The digraph class the algorithm runs on.
1.49 - /// \param CAP An arc map of capacities which can be any numreric type.
1.50 - /// The default type is \ref concepts::Digraph::ArcMap "GR::ArcMap<int>".
1.51 - /// \param TOL Tolerance class for handling inexact computations. The
1.52 + /// For an undirected graph you can run just the first phase of the
1.53 + /// algorithm or you can use the algorithm of Nagamochi and Ibaraki,
1.54 + /// which solves the undirected problem in \f$ O(nm + n^2 \log n) \f$
1.55 + /// time. It is implemented in the NagamochiIbaraki algorithm class.
1.56 + ///
1.57 + /// \tparam GR The type of the digraph the algorithm runs on.
1.58 + /// \tparam CAP The type of the arc map containing the capacities,
1.59 + /// which can be any numeric type. The default map type is
1.60 + /// \ref concepts::Digraph::ArcMap "GR::ArcMap<int>".
1.61 + /// \tparam TOL Tolerance class for handling inexact computations. The
1.62 /// default tolerance type is \ref Tolerance "Tolerance<CAP::Value>".
1.63 #ifdef DOXYGEN
1.64 template <typename GR, typename CAP, typename TOL>
1.65 @@ -73,15 +75,20 @@
1.66 typename TOL = Tolerance<typename CAP::Value> >
1.67 #endif
1.68 class HaoOrlin {
1.69 + public:
1.70 +
1.71 + /// The digraph type of the algorithm
1.72 + typedef GR Digraph;
1.73 + /// The capacity map type of the algorithm
1.74 + typedef CAP CapacityMap;
1.75 + /// The tolerance type of the algorithm
1.76 + typedef TOL Tolerance;
1.77 +
1.78 private:
1.79
1.80 - typedef GR Digraph;
1.81 - typedef CAP CapacityMap;
1.82 - typedef TOL Tolerance;
1.83 -
1.84 typedef typename CapacityMap::Value Value;
1.85
1.86 - TEMPLATE_GRAPH_TYPEDEFS(Digraph);
1.87 + TEMPLATE_DIGRAPH_TYPEDEFS(Digraph);
1.88
1.89 const Digraph& _graph;
1.90 const CapacityMap* _capacity;
1.91 @@ -161,56 +168,56 @@
1.92 private:
1.93
1.94 void activate(const Node& i) {
1.95 - _active->set(i, true);
1.96 + (*_active)[i] = true;
1.97
1.98 int bucket = (*_bucket)[i];
1.99
1.100 if ((*_prev)[i] == INVALID || (*_active)[(*_prev)[i]]) return;
1.101 //unlace
1.102 - _next->set((*_prev)[i], (*_next)[i]);
1.103 + (*_next)[(*_prev)[i]] = (*_next)[i];
1.104 if ((*_next)[i] != INVALID) {
1.105 - _prev->set((*_next)[i], (*_prev)[i]);
1.106 + (*_prev)[(*_next)[i]] = (*_prev)[i];
1.107 } else {
1.108 _last[bucket] = (*_prev)[i];
1.109 }
1.110 //lace
1.111 - _next->set(i, _first[bucket]);
1.112 - _prev->set(_first[bucket], i);
1.113 - _prev->set(i, INVALID);
1.114 + (*_next)[i] = _first[bucket];
1.115 + (*_prev)[_first[bucket]] = i;
1.116 + (*_prev)[i] = INVALID;
1.117 _first[bucket] = i;
1.118 }
1.119
1.120 void deactivate(const Node& i) {
1.121 - _active->set(i, false);
1.122 + (*_active)[i] = false;
1.123 int bucket = (*_bucket)[i];
1.124
1.125 if ((*_next)[i] == INVALID || !(*_active)[(*_next)[i]]) return;
1.126
1.127 //unlace
1.128 - _prev->set((*_next)[i], (*_prev)[i]);
1.129 + (*_prev)[(*_next)[i]] = (*_prev)[i];
1.130 if ((*_prev)[i] != INVALID) {
1.131 - _next->set((*_prev)[i], (*_next)[i]);
1.132 + (*_next)[(*_prev)[i]] = (*_next)[i];
1.133 } else {
1.134 _first[bucket] = (*_next)[i];
1.135 }
1.136 //lace
1.137 - _prev->set(i, _last[bucket]);
1.138 - _next->set(_last[bucket], i);
1.139 - _next->set(i, INVALID);
1.140 + (*_prev)[i] = _last[bucket];
1.141 + (*_next)[_last[bucket]] = i;
1.142 + (*_next)[i] = INVALID;
1.143 _last[bucket] = i;
1.144 }
1.145
1.146 void addItem(const Node& i, int bucket) {
1.147 (*_bucket)[i] = bucket;
1.148 if (_last[bucket] != INVALID) {
1.149 - _prev->set(i, _last[bucket]);
1.150 - _next->set(_last[bucket], i);
1.151 - _next->set(i, INVALID);
1.152 + (*_prev)[i] = _last[bucket];
1.153 + (*_next)[_last[bucket]] = i;
1.154 + (*_next)[i] = INVALID;
1.155 _last[bucket] = i;
1.156 } else {
1.157 - _prev->set(i, INVALID);
1.158 + (*_prev)[i] = INVALID;
1.159 _first[bucket] = i;
1.160 - _next->set(i, INVALID);
1.161 + (*_next)[i] = INVALID;
1.162 _last[bucket] = i;
1.163 }
1.164 }
1.165 @@ -218,11 +225,12 @@
1.166 void findMinCutOut() {
1.167
1.168 for (NodeIt n(_graph); n != INVALID; ++n) {
1.169 - _excess->set(n, 0);
1.170 + (*_excess)[n] = 0;
1.171 + (*_source_set)[n] = false;
1.172 }
1.173
1.174 for (ArcIt a(_graph); a != INVALID; ++a) {
1.175 - _flow->set(a, 0);
1.176 + (*_flow)[a] = 0;
1.177 }
1.178
1.179 int bucket_num = 0;
1.180 @@ -232,7 +240,7 @@
1.181 {
1.182 typename Digraph::template NodeMap<bool> reached(_graph, false);
1.183
1.184 - reached.set(_source, true);
1.185 + reached[_source] = true;
1.186 bool first_set = true;
1.187
1.188 for (NodeIt t(_graph); t != INVALID; ++t) {
1.189 @@ -240,7 +248,7 @@
1.190 _sets.push_front(std::list<int>());
1.191
1.192 queue[qlast++] = t;
1.193 - reached.set(t, true);
1.194 + reached[t] = true;
1.195
1.196 while (qfirst != qlast) {
1.197 if (qsep == qfirst) {
1.198 @@ -257,7 +265,7 @@
1.199 for (InArcIt a(_graph, n); a != INVALID; ++a) {
1.200 Node u = _graph.source(a);
1.201 if (!reached[u] && _tolerance.positive((*_capacity)[a])) {
1.202 - reached.set(u, true);
1.203 + reached[u] = true;
1.204 queue[qlast++] = u;
1.205 }
1.206 }
1.207 @@ -266,18 +274,18 @@
1.208 }
1.209
1.210 ++bucket_num;
1.211 - _bucket->set(_source, 0);
1.212 + (*_bucket)[_source] = 0;
1.213 _dormant[0] = true;
1.214 }
1.215 - _source_set->set(_source, true);
1.216 + (*_source_set)[_source] = true;
1.217
1.218 Node target = _last[_sets.back().back()];
1.219 {
1.220 for (OutArcIt a(_graph, _source); a != INVALID; ++a) {
1.221 if (_tolerance.positive((*_capacity)[a])) {
1.222 Node u = _graph.target(a);
1.223 - _flow->set(a, (*_capacity)[a]);
1.224 - _excess->set(u, (*_excess)[u] + (*_capacity)[a]);
1.225 + (*_flow)[a] = (*_capacity)[a];
1.226 + (*_excess)[u] += (*_capacity)[a];
1.227 if (!(*_active)[u] && u != _source) {
1.228 activate(u);
1.229 }
1.230 @@ -318,14 +326,14 @@
1.231 activate(v);
1.232 }
1.233 if (!_tolerance.less(rem, excess)) {
1.234 - _flow->set(a, (*_flow)[a] + excess);
1.235 - _excess->set(v, (*_excess)[v] + excess);
1.236 + (*_flow)[a] += excess;
1.237 + (*_excess)[v] += excess;
1.238 excess = 0;
1.239 goto no_more_push;
1.240 } else {
1.241 excess -= rem;
1.242 - _excess->set(v, (*_excess)[v] + rem);
1.243 - _flow->set(a, (*_capacity)[a]);
1.244 + (*_excess)[v] += rem;
1.245 + (*_flow)[a] = (*_capacity)[a];
1.246 }
1.247 } else if (next_bucket > (*_bucket)[v]) {
1.248 next_bucket = (*_bucket)[v];
1.249 @@ -342,14 +350,14 @@
1.250 activate(v);
1.251 }
1.252 if (!_tolerance.less(rem, excess)) {
1.253 - _flow->set(a, (*_flow)[a] - excess);
1.254 - _excess->set(v, (*_excess)[v] + excess);
1.255 + (*_flow)[a] -= excess;
1.256 + (*_excess)[v] += excess;
1.257 excess = 0;
1.258 goto no_more_push;
1.259 } else {
1.260 excess -= rem;
1.261 - _excess->set(v, (*_excess)[v] + rem);
1.262 - _flow->set(a, 0);
1.263 + (*_excess)[v] += rem;
1.264 + (*_flow)[a] = 0;
1.265 }
1.266 } else if (next_bucket > (*_bucket)[v]) {
1.267 next_bucket = (*_bucket)[v];
1.268 @@ -358,7 +366,7 @@
1.269
1.270 no_more_push:
1.271
1.272 - _excess->set(n, excess);
1.273 + (*_excess)[n] = excess;
1.274
1.275 if (excess != 0) {
1.276 if ((*_next)[n] == INVALID) {
1.277 @@ -376,16 +384,16 @@
1.278 }
1.279 } else if (next_bucket == _node_num) {
1.280 _first[(*_bucket)[n]] = (*_next)[n];
1.281 - _prev->set((*_next)[n], INVALID);
1.282 + (*_prev)[(*_next)[n]] = INVALID;
1.283
1.284 std::list<std::list<int> >::iterator new_set =
1.285 _sets.insert(--_sets.end(), std::list<int>());
1.286
1.287 new_set->push_front(bucket_num);
1.288 - _bucket->set(n, bucket_num);
1.289 + (*_bucket)[n] = bucket_num;
1.290 _first[bucket_num] = _last[bucket_num] = n;
1.291 - _next->set(n, INVALID);
1.292 - _prev->set(n, INVALID);
1.293 + (*_next)[n] = INVALID;
1.294 + (*_prev)[n] = INVALID;
1.295 _dormant[bucket_num] = true;
1.296 ++bucket_num;
1.297
1.298 @@ -395,7 +403,7 @@
1.299 }
1.300 } else {
1.301 _first[*_highest] = (*_next)[n];
1.302 - _prev->set((*_next)[n], INVALID);
1.303 + (*_prev)[(*_next)[n]] = INVALID;
1.304
1.305 while (next_bucket != *_highest) {
1.306 --_highest;
1.307 @@ -409,10 +417,10 @@
1.308 }
1.309 --_highest;
1.310
1.311 - _bucket->set(n, *_highest);
1.312 - _next->set(n, _first[*_highest]);
1.313 + (*_bucket)[n] = *_highest;
1.314 + (*_next)[n] = _first[*_highest];
1.315 if (_first[*_highest] != INVALID) {
1.316 - _prev->set(_first[*_highest], n);
1.317 + (*_prev)[_first[*_highest]] = n;
1.318 } else {
1.319 _last[*_highest] = n;
1.320 }
1.321 @@ -434,13 +442,13 @@
1.322 if ((*_excess)[target] < _min_cut) {
1.323 _min_cut = (*_excess)[target];
1.324 for (NodeIt i(_graph); i != INVALID; ++i) {
1.325 - _min_cut_map->set(i, true);
1.326 + (*_min_cut_map)[i] = true;
1.327 }
1.328 for (std::list<int>::iterator it = _sets.back().begin();
1.329 it != _sets.back().end(); ++it) {
1.330 Node n = _first[*it];
1.331 while (n != INVALID) {
1.332 - _min_cut_map->set(n, false);
1.333 + (*_min_cut_map)[n] = false;
1.334 n = (*_next)[n];
1.335 }
1.336 }
1.337 @@ -453,13 +461,13 @@
1.338 _last[(*_bucket)[target]] = (*_prev)[target];
1.339 new_target = (*_prev)[target];
1.340 } else {
1.341 - _prev->set((*_next)[target], (*_prev)[target]);
1.342 + (*_prev)[(*_next)[target]] = (*_prev)[target];
1.343 new_target = (*_next)[target];
1.344 }
1.345 if ((*_prev)[target] == INVALID) {
1.346 _first[(*_bucket)[target]] = (*_next)[target];
1.347 } else {
1.348 - _next->set((*_prev)[target], (*_next)[target]);
1.349 + (*_next)[(*_prev)[target]] = (*_next)[target];
1.350 }
1.351 } else {
1.352 _sets.back().pop_back();
1.353 @@ -475,9 +483,9 @@
1.354 new_target = _last[_sets.back().back()];
1.355 }
1.356
1.357 - _bucket->set(target, 0);
1.358 + (*_bucket)[target] = 0;
1.359
1.360 - _source_set->set(target, true);
1.361 + (*_source_set)[target] = true;
1.362 for (OutArcIt a(_graph, target); a != INVALID; ++a) {
1.363 Value rem = (*_capacity)[a] - (*_flow)[a];
1.364 if (!_tolerance.positive(rem)) continue;
1.365 @@ -485,8 +493,8 @@
1.366 if (!(*_active)[v] && !(*_source_set)[v]) {
1.367 activate(v);
1.368 }
1.369 - _excess->set(v, (*_excess)[v] + rem);
1.370 - _flow->set(a, (*_capacity)[a]);
1.371 + (*_excess)[v] += rem;
1.372 + (*_flow)[a] = (*_capacity)[a];
1.373 }
1.374
1.375 for (InArcIt a(_graph, target); a != INVALID; ++a) {
1.376 @@ -496,8 +504,8 @@
1.377 if (!(*_active)[v] && !(*_source_set)[v]) {
1.378 activate(v);
1.379 }
1.380 - _excess->set(v, (*_excess)[v] + rem);
1.381 - _flow->set(a, 0);
1.382 + (*_excess)[v] += rem;
1.383 + (*_flow)[a] = 0;
1.384 }
1.385
1.386 target = new_target;
1.387 @@ -517,11 +525,12 @@
1.388 void findMinCutIn() {
1.389
1.390 for (NodeIt n(_graph); n != INVALID; ++n) {
1.391 - _excess->set(n, 0);
1.392 + (*_excess)[n] = 0;
1.393 + (*_source_set)[n] = false;
1.394 }
1.395
1.396 for (ArcIt a(_graph); a != INVALID; ++a) {
1.397 - _flow->set(a, 0);
1.398 + (*_flow)[a] = 0;
1.399 }
1.400
1.401 int bucket_num = 0;
1.402 @@ -531,7 +540,7 @@
1.403 {
1.404 typename Digraph::template NodeMap<bool> reached(_graph, false);
1.405
1.406 - reached.set(_source, true);
1.407 + reached[_source] = true;
1.408
1.409 bool first_set = true;
1.410
1.411 @@ -540,7 +549,7 @@
1.412 _sets.push_front(std::list<int>());
1.413
1.414 queue[qlast++] = t;
1.415 - reached.set(t, true);
1.416 + reached[t] = true;
1.417
1.418 while (qfirst != qlast) {
1.419 if (qsep == qfirst) {
1.420 @@ -557,7 +566,7 @@
1.421 for (OutArcIt a(_graph, n); a != INVALID; ++a) {
1.422 Node u = _graph.target(a);
1.423 if (!reached[u] && _tolerance.positive((*_capacity)[a])) {
1.424 - reached.set(u, true);
1.425 + reached[u] = true;
1.426 queue[qlast++] = u;
1.427 }
1.428 }
1.429 @@ -566,18 +575,18 @@
1.430 }
1.431
1.432 ++bucket_num;
1.433 - _bucket->set(_source, 0);
1.434 + (*_bucket)[_source] = 0;
1.435 _dormant[0] = true;
1.436 }
1.437 - _source_set->set(_source, true);
1.438 + (*_source_set)[_source] = true;
1.439
1.440 Node target = _last[_sets.back().back()];
1.441 {
1.442 for (InArcIt a(_graph, _source); a != INVALID; ++a) {
1.443 if (_tolerance.positive((*_capacity)[a])) {
1.444 Node u = _graph.source(a);
1.445 - _flow->set(a, (*_capacity)[a]);
1.446 - _excess->set(u, (*_excess)[u] + (*_capacity)[a]);
1.447 + (*_flow)[a] = (*_capacity)[a];
1.448 + (*_excess)[u] += (*_capacity)[a];
1.449 if (!(*_active)[u] && u != _source) {
1.450 activate(u);
1.451 }
1.452 @@ -618,14 +627,14 @@
1.453 activate(v);
1.454 }
1.455 if (!_tolerance.less(rem, excess)) {
1.456 - _flow->set(a, (*_flow)[a] + excess);
1.457 - _excess->set(v, (*_excess)[v] + excess);
1.458 + (*_flow)[a] += excess;
1.459 + (*_excess)[v] += excess;
1.460 excess = 0;
1.461 goto no_more_push;
1.462 } else {
1.463 excess -= rem;
1.464 - _excess->set(v, (*_excess)[v] + rem);
1.465 - _flow->set(a, (*_capacity)[a]);
1.466 + (*_excess)[v] += rem;
1.467 + (*_flow)[a] = (*_capacity)[a];
1.468 }
1.469 } else if (next_bucket > (*_bucket)[v]) {
1.470 next_bucket = (*_bucket)[v];
1.471 @@ -642,14 +651,14 @@
1.472 activate(v);
1.473 }
1.474 if (!_tolerance.less(rem, excess)) {
1.475 - _flow->set(a, (*_flow)[a] - excess);
1.476 - _excess->set(v, (*_excess)[v] + excess);
1.477 + (*_flow)[a] -= excess;
1.478 + (*_excess)[v] += excess;
1.479 excess = 0;
1.480 goto no_more_push;
1.481 } else {
1.482 excess -= rem;
1.483 - _excess->set(v, (*_excess)[v] + rem);
1.484 - _flow->set(a, 0);
1.485 + (*_excess)[v] += rem;
1.486 + (*_flow)[a] = 0;
1.487 }
1.488 } else if (next_bucket > (*_bucket)[v]) {
1.489 next_bucket = (*_bucket)[v];
1.490 @@ -658,7 +667,7 @@
1.491
1.492 no_more_push:
1.493
1.494 - _excess->set(n, excess);
1.495 + (*_excess)[n] = excess;
1.496
1.497 if (excess != 0) {
1.498 if ((*_next)[n] == INVALID) {
1.499 @@ -676,16 +685,16 @@
1.500 }
1.501 } else if (next_bucket == _node_num) {
1.502 _first[(*_bucket)[n]] = (*_next)[n];
1.503 - _prev->set((*_next)[n], INVALID);
1.504 + (*_prev)[(*_next)[n]] = INVALID;
1.505
1.506 std::list<std::list<int> >::iterator new_set =
1.507 _sets.insert(--_sets.end(), std::list<int>());
1.508
1.509 new_set->push_front(bucket_num);
1.510 - _bucket->set(n, bucket_num);
1.511 + (*_bucket)[n] = bucket_num;
1.512 _first[bucket_num] = _last[bucket_num] = n;
1.513 - _next->set(n, INVALID);
1.514 - _prev->set(n, INVALID);
1.515 + (*_next)[n] = INVALID;
1.516 + (*_prev)[n] = INVALID;
1.517 _dormant[bucket_num] = true;
1.518 ++bucket_num;
1.519
1.520 @@ -695,7 +704,7 @@
1.521 }
1.522 } else {
1.523 _first[*_highest] = (*_next)[n];
1.524 - _prev->set((*_next)[n], INVALID);
1.525 + (*_prev)[(*_next)[n]] = INVALID;
1.526
1.527 while (next_bucket != *_highest) {
1.528 --_highest;
1.529 @@ -708,10 +717,10 @@
1.530 }
1.531 --_highest;
1.532
1.533 - _bucket->set(n, *_highest);
1.534 - _next->set(n, _first[*_highest]);
1.535 + (*_bucket)[n] = *_highest;
1.536 + (*_next)[n] = _first[*_highest];
1.537 if (_first[*_highest] != INVALID) {
1.538 - _prev->set(_first[*_highest], n);
1.539 + (*_prev)[_first[*_highest]] = n;
1.540 } else {
1.541 _last[*_highest] = n;
1.542 }
1.543 @@ -733,13 +742,13 @@
1.544 if ((*_excess)[target] < _min_cut) {
1.545 _min_cut = (*_excess)[target];
1.546 for (NodeIt i(_graph); i != INVALID; ++i) {
1.547 - _min_cut_map->set(i, false);
1.548 + (*_min_cut_map)[i] = false;
1.549 }
1.550 for (std::list<int>::iterator it = _sets.back().begin();
1.551 it != _sets.back().end(); ++it) {
1.552 Node n = _first[*it];
1.553 while (n != INVALID) {
1.554 - _min_cut_map->set(n, true);
1.555 + (*_min_cut_map)[n] = true;
1.556 n = (*_next)[n];
1.557 }
1.558 }
1.559 @@ -752,13 +761,13 @@
1.560 _last[(*_bucket)[target]] = (*_prev)[target];
1.561 new_target = (*_prev)[target];
1.562 } else {
1.563 - _prev->set((*_next)[target], (*_prev)[target]);
1.564 + (*_prev)[(*_next)[target]] = (*_prev)[target];
1.565 new_target = (*_next)[target];
1.566 }
1.567 if ((*_prev)[target] == INVALID) {
1.568 _first[(*_bucket)[target]] = (*_next)[target];
1.569 } else {
1.570 - _next->set((*_prev)[target], (*_next)[target]);
1.571 + (*_next)[(*_prev)[target]] = (*_next)[target];
1.572 }
1.573 } else {
1.574 _sets.back().pop_back();
1.575 @@ -774,9 +783,9 @@
1.576 new_target = _last[_sets.back().back()];
1.577 }
1.578
1.579 - _bucket->set(target, 0);
1.580 + (*_bucket)[target] = 0;
1.581
1.582 - _source_set->set(target, true);
1.583 + (*_source_set)[target] = true;
1.584 for (InArcIt a(_graph, target); a != INVALID; ++a) {
1.585 Value rem = (*_capacity)[a] - (*_flow)[a];
1.586 if (!_tolerance.positive(rem)) continue;
1.587 @@ -784,8 +793,8 @@
1.588 if (!(*_active)[v] && !(*_source_set)[v]) {
1.589 activate(v);
1.590 }
1.591 - _excess->set(v, (*_excess)[v] + rem);
1.592 - _flow->set(a, (*_capacity)[a]);
1.593 + (*_excess)[v] += rem;
1.594 + (*_flow)[a] = (*_capacity)[a];
1.595 }
1.596
1.597 for (OutArcIt a(_graph, target); a != INVALID; ++a) {
1.598 @@ -795,8 +804,8 @@
1.599 if (!(*_active)[v] && !(*_source_set)[v]) {
1.600 activate(v);
1.601 }
1.602 - _excess->set(v, (*_excess)[v] + rem);
1.603 - _flow->set(a, 0);
1.604 + (*_excess)[v] += rem;
1.605 + (*_flow)[a] = 0;
1.606 }
1.607
1.608 target = new_target;
1.609 @@ -815,31 +824,32 @@
1.610
1.611 public:
1.612
1.613 - /// \name Execution control
1.614 + /// \name Execution Control
1.615 /// The simplest way to execute the algorithm is to use
1.616 /// one of the member functions called \ref run().
1.617 /// \n
1.618 - /// If you need more control on the execution,
1.619 - /// first you must call \ref init(), then the \ref calculateIn() or
1.620 - /// \ref calculateOut() functions.
1.621 + /// If you need better control on the execution,
1.622 + /// you have to call one of the \ref init() functions first, then
1.623 + /// \ref calculateOut() and/or \ref calculateIn().
1.624
1.625 /// @{
1.626
1.627 - /// \brief Initializes the internal data structures.
1.628 + /// \brief Initialize the internal data structures.
1.629 ///
1.630 - /// Initializes the internal data structures. It creates
1.631 - /// the maps, residual graph adaptors and some bucket structures
1.632 - /// for the algorithm.
1.633 + /// This function initializes the internal data structures. It creates
1.634 + /// the maps and some bucket structures for the algorithm.
1.635 + /// The first node is used as the source node for the push-relabel
1.636 + /// algorithm.
1.637 void init() {
1.638 init(NodeIt(_graph));
1.639 }
1.640
1.641 - /// \brief Initializes the internal data structures.
1.642 + /// \brief Initialize the internal data structures.
1.643 ///
1.644 - /// Initializes the internal data structures. It creates
1.645 - /// the maps, residual graph adaptor and some bucket structures
1.646 - /// for the algorithm. Node \c source is used as the push-relabel
1.647 - /// algorithm's source.
1.648 + /// This function initializes the internal data structures. It creates
1.649 + /// the maps and some bucket structures for the algorithm.
1.650 + /// The given node is used as the source node for the push-relabel
1.651 + /// algorithm.
1.652 void init(const Node& source) {
1.653 _source = source;
1.654
1.655 @@ -879,31 +889,35 @@
1.656 }
1.657
1.658
1.659 - /// \brief Calculates a minimum cut with \f$ source \f$ on the
1.660 + /// \brief Calculate a minimum cut with \f$ source \f$ on the
1.661 /// source-side.
1.662 ///
1.663 - /// Calculates a minimum cut with \f$ source \f$ on the
1.664 + /// This function calculates a minimum cut with \f$ source \f$ on the
1.665 /// source-side (i.e. a set \f$ X\subsetneq V \f$ with
1.666 - /// \f$ source \in X \f$ and minimal out-degree).
1.667 + /// \f$ source \in X \f$ and minimal outgoing capacity).
1.668 + ///
1.669 + /// \pre \ref init() must be called before using this function.
1.670 void calculateOut() {
1.671 findMinCutOut();
1.672 }
1.673
1.674 - /// \brief Calculates a minimum cut with \f$ source \f$ on the
1.675 - /// target-side.
1.676 + /// \brief Calculate a minimum cut with \f$ source \f$ on the
1.677 + /// sink-side.
1.678 ///
1.679 - /// Calculates a minimum cut with \f$ source \f$ on the
1.680 - /// target-side (i.e. a set \f$ X\subsetneq V \f$ with
1.681 - /// \f$ source \in X \f$ and minimal out-degree).
1.682 + /// This function calculates a minimum cut with \f$ source \f$ on the
1.683 + /// sink-side (i.e. a set \f$ X\subsetneq V \f$ with
1.684 + /// \f$ source \notin X \f$ and minimal outgoing capacity).
1.685 + ///
1.686 + /// \pre \ref init() must be called before using this function.
1.687 void calculateIn() {
1.688 findMinCutIn();
1.689 }
1.690
1.691
1.692 - /// \brief Runs the algorithm.
1.693 + /// \brief Run the algorithm.
1.694 ///
1.695 - /// Runs the algorithm. It finds nodes \c source and \c target
1.696 - /// arbitrarily and then calls \ref init(), \ref calculateOut()
1.697 + /// This function runs the algorithm. It finds nodes \c source and
1.698 + /// \c target arbitrarily and then calls \ref init(), \ref calculateOut()
1.699 /// and \ref calculateIn().
1.700 void run() {
1.701 init();
1.702 @@ -911,11 +925,11 @@
1.703 calculateIn();
1.704 }
1.705
1.706 - /// \brief Runs the algorithm.
1.707 + /// \brief Run the algorithm.
1.708 ///
1.709 - /// Runs the algorithm. It uses the given \c source node, finds a
1.710 - /// proper \c target and then calls the \ref init(), \ref
1.711 - /// calculateOut() and \ref calculateIn().
1.712 + /// This function runs the algorithm. It uses the given \c source node,
1.713 + /// finds a proper \c target node and then calls the \ref init(),
1.714 + /// \ref calculateOut() and \ref calculateIn().
1.715 void run(const Node& s) {
1.716 init(s);
1.717 calculateOut();
1.718 @@ -926,32 +940,41 @@
1.719
1.720 /// \name Query Functions
1.721 /// The result of the %HaoOrlin algorithm
1.722 - /// can be obtained using these functions.
1.723 - /// \n
1.724 - /// Before using these functions, either \ref run(), \ref
1.725 - /// calculateOut() or \ref calculateIn() must be called.
1.726 + /// can be obtained using these functions.\n
1.727 + /// \ref run(), \ref calculateOut() or \ref calculateIn()
1.728 + /// should be called before using them.
1.729
1.730 /// @{
1.731
1.732 - /// \brief Returns the value of the minimum value cut.
1.733 + /// \brief Return the value of the minimum cut.
1.734 ///
1.735 - /// Returns the value of the minimum value cut.
1.736 + /// This function returns the value of the minimum cut.
1.737 + ///
1.738 + /// \pre \ref run(), \ref calculateOut() or \ref calculateIn()
1.739 + /// must be called before using this function.
1.740 Value minCutValue() const {
1.741 return _min_cut;
1.742 }
1.743
1.744
1.745 - /// \brief Returns a minimum cut.
1.746 + /// \brief Return a minimum cut.
1.747 ///
1.748 - /// Sets \c nodeMap to the characteristic vector of a minimum
1.749 - /// value cut: it will give a nonempty set \f$ X\subsetneq V \f$
1.750 - /// with minimal out-degree (i.e. \c nodeMap will be true exactly
1.751 - /// for the nodes of \f$ X \f$). \pre nodeMap should be a
1.752 - /// bool-valued node-map.
1.753 - template <typename NodeMap>
1.754 - Value minCutMap(NodeMap& nodeMap) const {
1.755 + /// This function sets \c cutMap to the characteristic vector of a
1.756 + /// minimum value cut: it will give a non-empty set \f$ X\subsetneq V \f$
1.757 + /// with minimal outgoing capacity (i.e. \c cutMap will be \c true exactly
1.758 + /// for the nodes of \f$ X \f$).
1.759 + ///
1.760 + /// \param cutMap A \ref concepts::WriteMap "writable" node map with
1.761 + /// \c bool (or convertible) value type.
1.762 + ///
1.763 + /// \return The value of the minimum cut.
1.764 + ///
1.765 + /// \pre \ref run(), \ref calculateOut() or \ref calculateIn()
1.766 + /// must be called before using this function.
1.767 + template <typename CutMap>
1.768 + Value minCutMap(CutMap& cutMap) const {
1.769 for (NodeIt it(_graph); it != INVALID; ++it) {
1.770 - nodeMap.set(it, (*_min_cut_map)[it]);
1.771 + cutMap.set(it, (*_min_cut_map)[it]);
1.772 }
1.773 return _min_cut;
1.774 }
1.775 @@ -960,7 +983,6 @@
1.776
1.777 }; //class HaoOrlin
1.778
1.779 -
1.780 } //namespace lemon
1.781
1.782 #endif //LEMON_HAO_ORLIN_H