# HG changeset patch # User deba # Date 1196765727 0 # Node ID f86f7e4eb2baa221e5062aae61fdbf85d6e474d5 # Parent 93de38566e6c12ba01e313149b750c146dba20ac Reimplementation of Hao-Orlin algorithm Little modification in NagamochiIbaraki More docs for minimum cut algorithms diff -r 93de38566e6c -r f86f7e4eb2ba doc/groups.dox --- a/doc/groups.dox Fri Nov 30 09:22:38 2007 +0000 +++ b/doc/groups.dox Tue Dec 04 10:55:27 2007 +0000 @@ -258,13 +258,34 @@ */ /** -@defgroup min_cut Minimum Cut algorithms -@ingroup algs -\brief This group describes the algorithms -for finding minimum cut in graphs. +@defgroup min_cut Minimum Cut algorithms +@ingroup algs -This group describes the algorithms -for finding minimum cut in graphs. +\brief This group describes the algorithms for finding a minimum cut in +graphs. + +This group describes the algorithms for finding a minimum cut in graphs. + +The minimum cut problem is to find a non-empty, proper subset \f$X\f$ +of the vertices with minimum overall capacity on outgoing arcs. +Formally, we are given a directed graph \f$G=(V,A)\f$ and a capacity +function \f$c:A\rightarrow\mathbf{R}^+_0\f$. The minimum cut is the +solution of the following optimization problem: + +\f[ \min_{X \subset V, X\not\in \{\emptyset, V\}}\sum_{uv\in A, u\in X, v\not\in X}c_{uv}\f] + +LEMON contains several algorithms related to minimum cut problems: + +- \ref lemon::HaoOrlin "Hao-Orlin algorithm" for calculating a minimum cut + in directed graphs +- \ref lemon::NagamochiIbaraki "Nagamochi-Ibaraki algorithm" for + calculating a minimum cut in undirected graphs +- \ref lemon::GomoryHuTree "Gomory-Hu tree computation" for calculating + all-pairs minimum cuts in undirected graphs + +If you want to find a minimum cut just between two distinct nodes, +please see the \ref max_flow "Maximum Flow page". + */ /** diff -r 93de38566e6c -r f86f7e4eb2ba lemon/hao_orlin.h --- a/lemon/hao_orlin.h Fri Nov 30 09:22:38 2007 +0000 +++ b/lemon/hao_orlin.h Tue Dec 04 10:55:27 2007 +0000 @@ -19,13 +19,9 @@ #ifndef LEMON_HAO_ORLIN_H #define LEMON_HAO_ORLIN_H -#include - - - #include -#include #include +#include #include #include @@ -37,7 +33,7 @@ /// \ingroup min_cut /// \brief Implementation of the Hao-Orlin algorithm. /// -/// Implementation of the HaoOrlin algorithms class for testing network +/// Implementation of the Hao-Orlin algorithm class for testing network /// reliability. namespace lemon { @@ -46,24 +42,25 @@ /// /// \brief %Hao-Orlin algorithm to find a minimum cut in directed graphs. /// - /// Hao-Orlin calculates a minimum cut in a directed graph - /// \f$ D=(V,A) \f$. It takes a fixed node \f$ source \in V \f$ and consists - /// of two phases: in the first phase it determines a minimum cut - /// with \f$ source \f$ on the source-side (i.e. a set \f$ X\subsetneq V \f$ - /// with \f$ source \in X \f$ and minimal out-degree) and in the - /// second phase it determines a minimum cut with \f$ source \f$ on the - /// sink-side (i.e. a set \f$ X\subsetneq V \f$ with \f$ source \notin X \f$ - /// and minimal out-degree). Obviously, the smaller of these two - /// cuts will be a minimum cut of \f$ D \f$. The algorithm is a - /// modified push-relabel preflow algorithm and our implementation - /// calculates the minimum cut in \f$ O(n^3) \f$ time (we use the - /// highest-label rule). The purpose of such an algorithm is testing - /// network reliability.
For an undirected graph with \f$ n \f$ - /// nodes and \f$ e \f$ edges you can use the algorithm of Nagamochi - /// and Ibaraki which solves the undirected problem in - /// \f$ O(ne + n^2 \log(n)) \f$ time: it is implemented in the MinCut - /// algorithm - /// class. + /// Hao-Orlin calculates a minimum cut in a directed graph + /// \f$D=(V,A)\f$. It takes a fixed node \f$ source \in V \f$ and + /// consists of two phases: in the first phase it determines a + /// minimum cut with \f$ source \f$ on the source-side (i.e. a set + /// \f$ X\subsetneq V \f$ with \f$ source \in X \f$ and minimal + /// out-degree) and in the second phase it determines a minimum cut + /// with \f$ source \f$ on the sink-side (i.e. a set + /// \f$ X\subsetneq V \f$ with \f$ source \notin X \f$ and minimal + /// out-degree). Obviously, the smaller of these two cuts will be a + /// minimum cut of \f$ D \f$. The algorithm is a modified + /// push-relabel preflow algorithm and our implementation calculates + /// the minimum cut in \f$ O(n^2\sqrt{m}) \f$ time (we use the + /// highest-label rule), or in \f$O(nm)\f$ for unit capacities. The + /// purpose of such algorithm is testing network reliability. For an + /// undirected graph you can run just the first phase of the + /// algorithm or you can use the algorithm of Nagamochi and Ibaraki + /// which solves the undirected problem in + /// \f$ O(nm + n^2 \log(n)) \f$ time: it is implemented in the + /// NagamochiIbaraki algorithm class. /// /// \param _Graph is the graph type of the algorithm. /// \param _CapacityMap is an edge map of capacities which should @@ -80,7 +77,7 @@ typename _Tolerance = Tolerance > #endif class HaoOrlin { - protected: + private: typedef _Graph Graph; typedef _CapacityMap CapacityMap; @@ -88,44 +85,29 @@ typedef typename CapacityMap::Value Value; + GRAPH_TYPEDEFS(typename Graph); - typedef typename Graph::Node Node; - typedef typename Graph::NodeIt NodeIt; - typedef typename Graph::EdgeIt EdgeIt; - typedef typename Graph::OutEdgeIt OutEdgeIt; - typedef typename Graph::InEdgeIt InEdgeIt; - - const Graph* _graph; - + const Graph& _graph; const CapacityMap* _capacity; typedef typename Graph::template EdgeMap FlowMap; + FlowMap* _flow; - FlowMap* _preflow; + Node _source; - Node _source, _target; int _node_num; - typedef ResGraphAdaptor OutResGraph; - typedef typename OutResGraph::Edge OutResEdge; + // Bucketing structure + std::vector _first, _last; + typename Graph::template NodeMap* _next; + typename Graph::template NodeMap* _prev; + typename Graph::template NodeMap* _active; + typename Graph::template NodeMap* _bucket; - OutResGraph* _out_res_graph; + std::vector _dormant; - typedef RevGraphAdaptor RevGraph; - RevGraph* _rev_graph; - - typedef ResGraphAdaptor InResGraph; - typedef typename InResGraph::Edge InResEdge; - - InResGraph* _in_res_graph; - - typedef IterableBoolMap WakeMap; - WakeMap* _wake; - - typedef typename Graph::template NodeMap DistMap; - DistMap* _dist; + std::list > _sets; + std::list::iterator _highest; typedef typename Graph::template NodeMap ExcessMap; ExcessMap* _excess; @@ -133,15 +115,6 @@ typedef typename Graph::template NodeMap SourceSetMap; SourceSetMap* _source_set; - std::vector _level_size; - - int _highest_active; - std::vector > _active_nodes; - - int _dormant_max; - std::vector > _dormant; - - Value _min_cut; typedef typename Graph::template NodeMap MinCutMap; @@ -156,267 +129,695 @@ /// Constructor of the algorithm class. 
HaoOrlin(const Graph& graph, const CapacityMap& capacity, const Tolerance& tolerance = Tolerance()) : - _graph(&graph), _capacity(&capacity), - _preflow(0), _source(), _target(), - _out_res_graph(0), _rev_graph(0), _in_res_graph(0), - _wake(0),_dist(0), _excess(0), _source_set(0), - _highest_active(), _active_nodes(), _dormant_max(), _dormant(), - _min_cut(), _min_cut_map(0), _tolerance(tolerance) {} + _graph(graph), _capacity(&capacity), _flow(0), _source(), + _node_num(), _first(), _last(), _next(0), _prev(0), + _active(0), _bucket(0), _dormant(), _sets(), _highest(), + _excess(0), _source_set(0), _min_cut(), _min_cut_map(0), + _tolerance(tolerance) {} ~HaoOrlin() { if (_min_cut_map) { delete _min_cut_map; } - if (_in_res_graph) { - delete _in_res_graph; - } - if (_rev_graph) { - delete _rev_graph; - } - if (_out_res_graph) { - delete _out_res_graph; - } if (_source_set) { delete _source_set; } if (_excess) { delete _excess; } - if (_dist) { - delete _dist; + if (_next) { + delete _next; } - if (_wake) { - delete _wake; + if (_prev) { + delete _prev; } - if (_preflow) { - delete _preflow; + if (_active) { + delete _active; + } + if (_bucket) { + delete _bucket; + } + if (_flow) { + delete _flow; } } private: + + void activate(const Node& i) { + _active->set(i, true); + + int bucket = (*_bucket)[i]; + + if ((*_prev)[i] == INVALID || (*_active)[(*_prev)[i]]) return; + //unlace + _next->set((*_prev)[i], (*_next)[i]); + if ((*_next)[i] != INVALID) { + _prev->set((*_next)[i], (*_prev)[i]); + } else { + _last[bucket] = (*_prev)[i]; + } + //lace + _next->set(i, _first[bucket]); + _prev->set(_first[bucket], i); + _prev->set(i, INVALID); + _first[bucket] = i; + } + + void deactivate(const Node& i) { + _active->set(i, false); + int bucket = (*_bucket)[i]; + + if ((*_next)[i] == INVALID || !(*_active)[(*_next)[i]]) return; + + //unlace + _prev->set((*_next)[i], (*_prev)[i]); + if ((*_prev)[i] != INVALID) { + _next->set((*_prev)[i], (*_next)[i]); + } else { + _first[bucket] = (*_next)[i]; + } + //lace + _prev->set(i, _last[bucket]); + _next->set(_last[bucket], i); + _next->set(i, INVALID); + _last[bucket] = i; + } + + void addItem(const Node& i, int bucket) { + (*_bucket)[i] = bucket; + if (_last[bucket] != INVALID) { + _prev->set(i, _last[bucket]); + _next->set(_last[bucket], i); + _next->set(i, INVALID); + _last[bucket] = i; + } else { + _prev->set(i, INVALID); + _first[bucket] = i; + _next->set(i, INVALID); + _last[bucket] = i; + } + } - template - void findMinCut(const Node& target, bool out, ResGraph& res_graph) { - typedef typename ResGraph::Edge ResEdge; - typedef typename ResGraph::OutEdgeIt ResOutEdgeIt; + void findMinCutOut() { - for (typename Graph::EdgeIt it(*_graph); it != INVALID; ++it) { - (*_preflow)[it] = 0; - } - for (NodeIt it(*_graph); it != INVALID; ++it) { - (*_wake)[it] = true; - (*_dist)[it] = 1; - (*_excess)[it] = 0; - (*_source_set)[it] = false; + for (NodeIt n(_graph); n != INVALID; ++n) { + _excess->set(n, 0); } - _dormant[0].push_front(_source); - (*_source_set)[_source] = true; - _dormant_max = 0; - (*_wake)[_source] = false; - - _level_size[0] = 1; - _level_size[1] = _node_num - 1; - - _target = target; - (*_dist)[target] = 0; - - for (ResOutEdgeIt it(res_graph, _source); it != INVALID; ++it) { - Value delta = res_graph.rescap(it); - (*_excess)[_source] -= delta; - res_graph.augment(it, delta); - Node a = res_graph.target(it); - if ((*_excess)[a] == 0 && (*_wake)[a] && a != _target) { - _active_nodes[(*_dist)[a]].push_front(a); - if (_highest_active < (*_dist)[a]) { - 
_highest_active = (*_dist)[a]; - } - } - (*_excess)[a] += delta; + for (EdgeIt e(_graph); e != INVALID; ++e) { + _flow->set(e, 0); } + int bucket_num = 1; + + { + typename Graph::template NodeMap reached(_graph, false); + + reached.set(_source, true); - do { - Node n; - while ((n = findActiveNode()) != INVALID) { - for (ResOutEdgeIt e(res_graph, n); e != INVALID; ++e) { - Node a = res_graph.target(e); - if ((*_dist)[a] >= (*_dist)[n] || !(*_wake)[a]) continue; - Value delta = res_graph.rescap(e); - if (_tolerance.positive((*_excess)[n] - delta)) { - (*_excess)[n] -= delta; - } else { - delta = (*_excess)[n]; - (*_excess)[n] = 0; + bool first_set = true; + + for (NodeIt t(_graph); t != INVALID; ++t) { + if (reached[t]) continue; + _sets.push_front(std::list()); + _sets.front().push_front(bucket_num); + _dormant[bucket_num] = !first_set; + + _bucket->set(t, bucket_num); + _first[bucket_num] = _last[bucket_num] = t; + _next->set(t, INVALID); + _prev->set(t, INVALID); + + ++bucket_num; + + std::vector queue; + queue.push_back(t); + reached.set(t, true); + + while (!queue.empty()) { + _sets.front().push_front(bucket_num); + _dormant[bucket_num] = !first_set; + _first[bucket_num] = _last[bucket_num] = INVALID; + + std::vector nqueue; + for (int i = 0; i < int(queue.size()); ++i) { + Node n = queue[i]; + for (InEdgeIt e(_graph, n); e != INVALID; ++e) { + Node u = _graph.source(e); + if (!reached[u] && _tolerance.positive((*_capacity)[e])) { + reached.set(u, true); + addItem(u, bucket_num); + nqueue.push_back(u); + } + } } - res_graph.augment(e, delta); - if ((*_excess)[a] == 0 && a != _target) { - _active_nodes[(*_dist)[a]].push_front(a); - } - (*_excess)[a] += delta; - if ((*_excess)[n] == 0) break; + queue.swap(nqueue); + ++bucket_num; } - if ((*_excess)[n] != 0) { - relabel(n, res_graph); - } + _sets.front().pop_front(); + --bucket_num; + first_set = false; } - Value current_value = cutValue(out); - if (_min_cut > current_value){ - if (out) { - for (NodeIt it(*_graph); it != INVALID; ++it) { - _min_cut_map->set(it, !(*_wake)[it]); - } - } else { - for (NodeIt it(*_graph); it != INVALID; ++it) { - _min_cut_map->set(it, (*_wake)[it]); - } - } - - _min_cut = current_value; - } - - } while (selectNewSink(res_graph)); - } - - template - void relabel(const Node& n, ResGraph& res_graph) { - typedef typename ResGraph::OutEdgeIt ResOutEdgeIt; - - int k = (*_dist)[n]; - if (_level_size[k] == 1) { - ++_dormant_max; - for (NodeIt it(*_graph); it != INVALID; ++it) { - if ((*_wake)[it] && (*_dist)[it] >= k) { - (*_wake)[it] = false; - _dormant[_dormant_max].push_front(it); - --_level_size[(*_dist)[it]]; + _bucket->set(_source, 0); + _dormant[0] = true; + } + _source_set->set(_source, true); + + Node target = _last[_sets.back().back()]; + { + for (OutEdgeIt e(_graph, _source); e != INVALID; ++e) { + if (_tolerance.positive((*_capacity)[e])) { + Node u = _graph.target(e); + _flow->set(e, (*_capacity)[e]); + _excess->set(u, (*_excess)[u] + (*_capacity)[e]); + if (!(*_active)[u] && u != _source) { + activate(u); + } } } - --_highest_active; - } else { - int new_dist = _node_num; - for (ResOutEdgeIt e(res_graph, n); e != INVALID; ++e) { - Node t = res_graph.target(e); - if ((*_wake)[t] && new_dist > (*_dist)[t]) { - new_dist = (*_dist)[t]; - } - } - if (new_dist == _node_num) { - ++_dormant_max; - (*_wake)[n] = false; - _dormant[_dormant_max].push_front(n); - --_level_size[(*_dist)[n]]; - } else { - --_level_size[(*_dist)[n]]; - (*_dist)[n] = new_dist + 1; - _highest_active = (*_dist)[n]; - 
_active_nodes[_highest_active].push_front(n); - ++_level_size[(*_dist)[n]]; + if ((*_active)[target]) { + deactivate(target); + } + + _highest = _sets.back().begin(); + while (_highest != _sets.back().end() && + !(*_active)[_first[*_highest]]) { + ++_highest; + } + } + + + while (true) { + while (_highest != _sets.back().end()) { + Node n = _first[*_highest]; + Value excess = (*_excess)[n]; + int next_bucket = _node_num; + + int under_bucket; + if (++std::list::iterator(_highest) == _sets.back().end()) { + under_bucket = -1; + } else { + under_bucket = *(++std::list::iterator(_highest)); + } + + for (OutEdgeIt e(_graph, n); e != INVALID; ++e) { + Node v = _graph.target(e); + if (_dormant[(*_bucket)[v]]) continue; + Value rem = (*_capacity)[e] - (*_flow)[e]; + if (!_tolerance.positive(rem)) continue; + if ((*_bucket)[v] == under_bucket) { + if (!(*_active)[v] && v != target) { + activate(v); + } + if (!_tolerance.less(rem, excess)) { + _flow->set(e, (*_flow)[e] + excess); + _excess->set(v, (*_excess)[v] + excess); + excess = 0; + goto no_more_push; + } else { + excess -= rem; + _excess->set(v, (*_excess)[v] + rem); + _flow->set(e, (*_capacity)[e]); + } + } else if (next_bucket > (*_bucket)[v]) { + next_bucket = (*_bucket)[v]; + } + } + + for (InEdgeIt e(_graph, n); e != INVALID; ++e) { + Node v = _graph.source(e); + if (_dormant[(*_bucket)[v]]) continue; + Value rem = (*_flow)[e]; + if (!_tolerance.positive(rem)) continue; + if ((*_bucket)[v] == under_bucket) { + if (!(*_active)[v] && v != target) { + activate(v); + } + if (!_tolerance.less(rem, excess)) { + _flow->set(e, (*_flow)[e] - excess); + _excess->set(v, (*_excess)[v] + excess); + excess = 0; + goto no_more_push; + } else { + excess -= rem; + _excess->set(v, (*_excess)[v] + rem); + _flow->set(e, 0); + } + } else if (next_bucket > (*_bucket)[v]) { + next_bucket = (*_bucket)[v]; + } + } + + no_more_push: + + _excess->set(n, excess); + + if (excess != 0) { + if ((*_next)[n] == INVALID) { + typename std::list >::iterator new_set = + _sets.insert(--_sets.end(), std::list()); + new_set->splice(new_set->end(), _sets.back(), + _sets.back().begin(), ++_highest); + for (std::list::iterator it = new_set->begin(); + it != new_set->end(); ++it) { + _dormant[*it] = true; + } + while (_highest != _sets.back().end() && + !(*_active)[_first[*_highest]]) { + ++_highest; + } + } else if (next_bucket == _node_num) { + _first[(*_bucket)[n]] = (*_next)[n]; + _prev->set((*_next)[n], INVALID); + + std::list >::iterator new_set = + _sets.insert(--_sets.end(), std::list()); + + new_set->push_front(bucket_num); + _bucket->set(n, bucket_num); + _first[bucket_num] = _last[bucket_num] = n; + _next->set(n, INVALID); + _prev->set(n, INVALID); + _dormant[bucket_num] = true; + ++bucket_num; + + while (_highest != _sets.back().end() && + !(*_active)[_first[*_highest]]) { + ++_highest; + } + } else { + _first[*_highest] = (*_next)[n]; + _prev->set((*_next)[n], INVALID); + + while (next_bucket != *_highest) { + --_highest; + } + + if (_highest == _sets.back().begin()) { + _sets.back().push_front(bucket_num); + _dormant[bucket_num] = false; + _first[bucket_num] = _last[bucket_num] = INVALID; + ++bucket_num; + } + --_highest; + + _bucket->set(n, *_highest); + _next->set(n, _first[*_highest]); + if (_first[*_highest] != INVALID) { + _prev->set(_first[*_highest], n); + } else { + _last[*_highest] = n; + } + _first[*_highest] = n; + } + } else { + + deactivate(n); + if (!(*_active)[_first[*_highest]]) { + ++_highest; + if (_highest != _sets.back().end() && + 
!(*_active)[_first[*_highest]]) { + _highest = _sets.back().end(); + } + } + } + } + + if ((*_excess)[target] < _min_cut) { + _min_cut = (*_excess)[target]; + for (NodeIt i(_graph); i != INVALID; ++i) { + _min_cut_map->set(i, true); + } + for (std::list::iterator it = _sets.back().begin(); + it != _sets.back().end(); ++it) { + Node n = _first[*it]; + while (n != INVALID) { + _min_cut_map->set(n, false); + n = (*_next)[n]; + } + } + } + + { + Node new_target; + if ((*_prev)[target] != INVALID) { + _last[(*_bucket)[target]] = (*_prev)[target]; + _next->set((*_prev)[target], INVALID); + new_target = (*_prev)[target]; + } else { + _sets.back().pop_back(); + if (_sets.back().empty()) { + _sets.pop_back(); + if (_sets.empty()) + break; + for (std::list::iterator it = _sets.back().begin(); + it != _sets.back().end(); ++it) { + _dormant[*it] = false; + } + } + new_target = _last[_sets.back().back()]; + } + + _bucket->set(target, 0); + + _source_set->set(target, true); + for (OutEdgeIt e(_graph, target); e != INVALID; ++e) { + Value rem = (*_capacity)[e] - (*_flow)[e]; + if (!_tolerance.positive(rem)) continue; + Node v = _graph.target(e); + if (!(*_active)[v] && !(*_source_set)[v]) { + activate(v); + } + _excess->set(v, (*_excess)[v] + rem); + _flow->set(e, (*_capacity)[e]); + } + + for (InEdgeIt e(_graph, target); e != INVALID; ++e) { + Value rem = (*_flow)[e]; + if (!_tolerance.positive(rem)) continue; + Node v = _graph.source(e); + if (!(*_active)[v] && !(*_source_set)[v]) { + activate(v); + } + _excess->set(v, (*_excess)[v] + rem); + _flow->set(e, 0); + } + + target = new_target; + if ((*_active)[target]) { + deactivate(target); + } + + _highest = _sets.back().begin(); + while (_highest != _sets.back().end() && + !(*_active)[_first[*_highest]]) { + ++_highest; + } + } + } + } + + void findMinCutIn() { + + for (NodeIt n(_graph); n != INVALID; ++n) { + _excess->set(n, 0); + } + + for (EdgeIt e(_graph); e != INVALID; ++e) { + _flow->set(e, 0); + } + + int bucket_num = 1; + + { + typename Graph::template NodeMap reached(_graph, false); + + reached.set(_source, true); + + bool first_set = true; + + for (NodeIt t(_graph); t != INVALID; ++t) { + if (reached[t]) continue; + _sets.push_front(std::list()); + _sets.front().push_front(bucket_num); + _dormant[bucket_num] = !first_set; + + _bucket->set(t, bucket_num); + _first[bucket_num] = _last[bucket_num] = t; + _next->set(t, INVALID); + _prev->set(t, INVALID); + + ++bucket_num; + + std::vector queue; + queue.push_back(t); + reached.set(t, true); + + while (!queue.empty()) { + _sets.front().push_front(bucket_num); + _dormant[bucket_num] = !first_set; + _first[bucket_num] = _last[bucket_num] = INVALID; + + std::vector nqueue; + for (int i = 0; i < int(queue.size()); ++i) { + Node n = queue[i]; + for (OutEdgeIt e(_graph, n); e != INVALID; ++e) { + Node u = _graph.target(e); + if (!reached[u] && _tolerance.positive((*_capacity)[e])) { + reached.set(u, true); + addItem(u, bucket_num); + nqueue.push_back(u); + } + } + } + queue.swap(nqueue); + ++bucket_num; + } + _sets.front().pop_front(); + --bucket_num; + first_set = false; + } + + _bucket->set(_source, 0); + _dormant[0] = true; + } + _source_set->set(_source, true); + + Node target = _last[_sets.back().back()]; + { + for (InEdgeIt e(_graph, _source); e != INVALID; ++e) { + if (_tolerance.positive((*_capacity)[e])) { + Node u = _graph.source(e); + _flow->set(e, (*_capacity)[e]); + _excess->set(u, (*_excess)[u] + (*_capacity)[e]); + if (!(*_active)[u] && u != _source) { + activate(u); + } + } + } + if 
((*_active)[target]) { + deactivate(target); + } + + _highest = _sets.back().begin(); + while (_highest != _sets.back().end() && + !(*_active)[_first[*_highest]]) { + ++_highest; + } + } + + + while (true) { + while (_highest != _sets.back().end()) { + Node n = _first[*_highest]; + Value excess = (*_excess)[n]; + int next_bucket = _node_num; + + int under_bucket; + if (++std::list::iterator(_highest) == _sets.back().end()) { + under_bucket = -1; + } else { + under_bucket = *(++std::list::iterator(_highest)); + } + + for (InEdgeIt e(_graph, n); e != INVALID; ++e) { + Node v = _graph.source(e); + if (_dormant[(*_bucket)[v]]) continue; + Value rem = (*_capacity)[e] - (*_flow)[e]; + if (!_tolerance.positive(rem)) continue; + if ((*_bucket)[v] == under_bucket) { + if (!(*_active)[v] && v != target) { + activate(v); + } + if (!_tolerance.less(rem, excess)) { + _flow->set(e, (*_flow)[e] + excess); + _excess->set(v, (*_excess)[v] + excess); + excess = 0; + goto no_more_push; + } else { + excess -= rem; + _excess->set(v, (*_excess)[v] + rem); + _flow->set(e, (*_capacity)[e]); + } + } else if (next_bucket > (*_bucket)[v]) { + next_bucket = (*_bucket)[v]; + } + } + + for (OutEdgeIt e(_graph, n); e != INVALID; ++e) { + Node v = _graph.target(e); + if (_dormant[(*_bucket)[v]]) continue; + Value rem = (*_flow)[e]; + if (!_tolerance.positive(rem)) continue; + if ((*_bucket)[v] == under_bucket) { + if (!(*_active)[v] && v != target) { + activate(v); + } + if (!_tolerance.less(rem, excess)) { + _flow->set(e, (*_flow)[e] - excess); + _excess->set(v, (*_excess)[v] + excess); + excess = 0; + goto no_more_push; + } else { + excess -= rem; + _excess->set(v, (*_excess)[v] + rem); + _flow->set(e, 0); + } + } else if (next_bucket > (*_bucket)[v]) { + next_bucket = (*_bucket)[v]; + } + } + + no_more_push: + + _excess->set(n, excess); + + if (excess != 0) { + if ((*_next)[n] == INVALID) { + typename std::list >::iterator new_set = + _sets.insert(--_sets.end(), std::list()); + new_set->splice(new_set->end(), _sets.back(), + _sets.back().begin(), ++_highest); + for (std::list::iterator it = new_set->begin(); + it != new_set->end(); ++it) { + _dormant[*it] = true; + } + while (_highest != _sets.back().end() && + !(*_active)[_first[*_highest]]) { + ++_highest; + } + } else if (next_bucket == _node_num) { + _first[(*_bucket)[n]] = (*_next)[n]; + _prev->set((*_next)[n], INVALID); + + std::list >::iterator new_set = + _sets.insert(--_sets.end(), std::list()); + + new_set->push_front(bucket_num); + _bucket->set(n, bucket_num); + _first[bucket_num] = _last[bucket_num] = n; + _next->set(n, INVALID); + _prev->set(n, INVALID); + _dormant[bucket_num] = true; + ++bucket_num; + + while (_highest != _sets.back().end() && + !(*_active)[_first[*_highest]]) { + ++_highest; + } + } else { + _first[*_highest] = (*_next)[n]; + _prev->set((*_next)[n], INVALID); + + while (next_bucket != *_highest) { + --_highest; + } + if (_highest == _sets.back().begin()) { + _sets.back().push_front(bucket_num); + _dormant[bucket_num] = false; + _first[bucket_num] = _last[bucket_num] = INVALID; + ++bucket_num; + } + --_highest; + + _bucket->set(n, *_highest); + _next->set(n, _first[*_highest]); + if (_first[*_highest] != INVALID) { + _prev->set(_first[*_highest], n); + } else { + _last[*_highest] = n; + } + _first[*_highest] = n; + } + } else { + + deactivate(n); + if (!(*_active)[_first[*_highest]]) { + ++_highest; + if (_highest != _sets.back().end() && + !(*_active)[_first[*_highest]]) { + _highest = _sets.back().end(); + } + } + } + } + + if 
((*_excess)[target] < _min_cut) { + _min_cut = (*_excess)[target]; + for (NodeIt i(_graph); i != INVALID; ++i) { + _min_cut_map->set(i, false); + } + for (std::list::iterator it = _sets.back().begin(); + it != _sets.back().end(); ++it) { + Node n = _first[*it]; + while (n != INVALID) { + _min_cut_map->set(n, true); + n = (*_next)[n]; + } + } + } + + { + Node new_target; + if ((*_prev)[target] != INVALID) { + _last[(*_bucket)[target]] = (*_prev)[target]; + _next->set((*_prev)[target], INVALID); + new_target = (*_prev)[target]; + } else { + _sets.back().pop_back(); + if (_sets.back().empty()) { + _sets.pop_back(); + if (_sets.empty()) + break; + for (std::list::iterator it = _sets.back().begin(); + it != _sets.back().end(); ++it) { + _dormant[*it] = false; + } + } + new_target = _last[_sets.back().back()]; + } + + _bucket->set(target, 0); + + _source_set->set(target, true); + for (InEdgeIt e(_graph, target); e != INVALID; ++e) { + Value rem = (*_capacity)[e] - (*_flow)[e]; + if (!_tolerance.positive(rem)) continue; + Node v = _graph.source(e); + if (!(*_active)[v] && !(*_source_set)[v]) { + activate(v); + } + _excess->set(v, (*_excess)[v] + rem); + _flow->set(e, (*_capacity)[e]); + } + + for (OutEdgeIt e(_graph, target); e != INVALID; ++e) { + Value rem = (*_flow)[e]; + if (!_tolerance.positive(rem)) continue; + Node v = _graph.target(e); + if (!(*_active)[v] && !(*_source_set)[v]) { + activate(v); + } + _excess->set(v, (*_excess)[v] + rem); + _flow->set(e, 0); + } + + target = new_target; + if ((*_active)[target]) { + deactivate(target); + } + + _highest = _sets.back().begin(); + while (_highest != _sets.back().end() && + !(*_active)[_first[*_highest]]) { + ++_highest; + } } } } - template - bool selectNewSink(ResGraph& res_graph) { - typedef typename ResGraph::OutEdgeIt ResOutEdgeIt; - - Node old_target = _target; - (*_wake)[_target] = false; - --_level_size[(*_dist)[_target]]; - _dormant[0].push_front(_target); - (*_source_set)[_target] = true; - if (int(_dormant[0].size()) == _node_num){ - _dormant[0].clear(); - return false; - } - - bool wake_was_empty = false; - - if(_wake->trueNum() == 0) { - while (!_dormant[_dormant_max].empty()){ - (*_wake)[_dormant[_dormant_max].front()] = true; - ++_level_size[(*_dist)[_dormant[_dormant_max].front()]]; - _dormant[_dormant_max].pop_front(); - } - --_dormant_max; - wake_was_empty = true; - } - - int min_dist = std::numeric_limits::max(); - for (typename WakeMap::TrueIt it(*_wake); it != INVALID; ++it) { - if (min_dist > (*_dist)[it]){ - _target = it; - min_dist = (*_dist)[it]; - } - } - - if (wake_was_empty){ - for (typename WakeMap::TrueIt it(*_wake); it != INVALID; ++it) { - if ((*_excess)[it] != 0 && it != _target) { - _active_nodes[(*_dist)[it]].push_front(it); - if (_highest_active < (*_dist)[it]) { - _highest_active = (*_dist)[it]; - } - } - } - } - - Node n = old_target; - for (ResOutEdgeIt e(res_graph, n); e != INVALID; ++e){ - Node a = res_graph.target(e); - if (!(*_wake)[a]) continue; - Value delta = res_graph.rescap(e); - res_graph.augment(e, delta); - (*_excess)[n] -= delta; - if ((*_excess)[a] == 0 && (*_wake)[a] && a != _target) { - _active_nodes[(*_dist)[a]].push_front(a); - if (_highest_active < (*_dist)[a]) { - _highest_active = (*_dist)[a]; - } - } - (*_excess)[a] += delta; - } - - return true; - } - - Node findActiveNode() { - while (_highest_active > 0 && _active_nodes[_highest_active].empty()){ - --_highest_active; - } - if( _highest_active > 0) { - Node n = _active_nodes[_highest_active].front(); - 
_active_nodes[_highest_active].pop_front(); - return n; - } else { - return INVALID; - } - } - - Value cutValue(bool out) { - Value value = 0; - if (out) { - for (typename WakeMap::TrueIt it(*_wake); it != INVALID; ++it) { - for (InEdgeIt e(*_graph, it); e != INVALID; ++e) { - if (!(*_wake)[_graph->source(e)]){ - value += (*_capacity)[e]; - } - } - } - } else { - for (typename WakeMap::TrueIt it(*_wake); it != INVALID; ++it) { - for (OutEdgeIt e(*_graph, it); e != INVALID; ++e) { - if (!(*_wake)[_graph->target(e)]){ - value += (*_capacity)[e]; - } - } - } - } - return value; - } - - public: /// \name Execution control @@ -435,7 +836,7 @@ /// the maps, residual graph adaptors and some bucket structures /// for the algorithm. void init() { - init(NodeIt(*_graph)); + init(NodeIt(_graph)); } /// \brief Initializes the internal data structures. @@ -446,38 +847,37 @@ /// algorithm's source. void init(const Node& source) { _source = source; - _node_num = countNodes(*_graph); + + _node_num = countNodes(_graph); + + _first.resize(_node_num); + _last.resize(_node_num); _dormant.resize(_node_num); - _level_size.resize(_node_num, 0); - _active_nodes.resize(_node_num); - if (!_preflow) { - _preflow = new FlowMap(*_graph); + if (!_flow) { + _flow = new FlowMap(_graph); } - if (!_wake) { - _wake = new WakeMap(*_graph); + if (!_next) { + _next = new typename Graph::template NodeMap(_graph); } - if (!_dist) { - _dist = new DistMap(*_graph); + if (!_prev) { + _prev = new typename Graph::template NodeMap(_graph); + } + if (!_active) { + _active = new typename Graph::template NodeMap(_graph); + } + if (!_bucket) { + _bucket = new typename Graph::template NodeMap(_graph); } if (!_excess) { - _excess = new ExcessMap(*_graph); + _excess = new ExcessMap(_graph); } if (!_source_set) { - _source_set = new SourceSetMap(*_graph); - } - if (!_out_res_graph) { - _out_res_graph = new OutResGraph(*_graph, *_capacity, *_preflow); - } - if (!_rev_graph) { - _rev_graph = new RevGraph(*_graph); - } - if (!_in_res_graph) { - _in_res_graph = new InResGraph(*_rev_graph, *_capacity, *_preflow); + _source_set = new SourceSetMap(_graph); } if (!_min_cut_map) { - _min_cut_map = new MinCutMap(*_graph); + _min_cut_map = new MinCutMap(_graph); } _min_cut = std::numeric_limits::max(); @@ -487,56 +887,23 @@ /// \brief Calculates a minimum cut with \f$ source \f$ on the /// source-side. /// - /// \brief Calculates a minimum cut with \f$ source \f$ on the - /// source-side (i.e. a set \f$ X\subsetneq V \f$ with \f$ source \in X \f$ - /// and minimal out-degree). + /// Calculates a minimum cut with \f$ source \f$ on the + /// source-side (i.e. a set \f$ X\subsetneq V \f$ with \f$ source + /// \in X \f$ and minimal out-degree). void calculateOut() { - for (NodeIt it(*_graph); it != INVALID; ++it) { - if (it != _source) { - calculateOut(it); - return; - } - } + findMinCutOut(); } /// \brief Calculates a minimum cut with \f$ source \f$ on the - /// source-side. + /// target-side. /// - /// \brief Calculates a minimum cut with \f$ source \f$ on the - /// source-side (i.e. a set \f$ X\subsetneq V \f$ with \f$ source \in X \f$ - /// and minimal out-degree). The \c target is the initial target - /// for the push-relabel algorithm. - void calculateOut(const Node& target) { - findMinCut(target, true, *_out_res_graph); + /// Calculates a minimum cut with \f$ source \f$ on the + /// target-side (i.e. a set \f$ X\subsetneq V \f$ with \f$ source + /// \notin X \f$ and minimal out-degree).
+ void calculateIn() { + findMinCutIn(); } - /// \brief Calculates a minimum cut with \f$ source \f$ on the - /// sink-side. - /// - /// \brief Calculates a minimum cut with \f$ source \f$ on the - /// sink-side (i.e. a set \f$ X\subsetneq V \f$ with - /// \f$ source \notin X \f$ - /// and minimal out-degree). - void calculateIn() { - for (NodeIt it(*_graph); it != INVALID; ++it) { - if (it != _source) { - calculateIn(it); - return; - } - } - } - - /// \brief Calculates a minimum cut with \f$ source \f$ on the - /// sink-side. - /// - /// \brief Calculates a minimum cut with \f$ source \f$ on the - /// sink-side (i.e. a set \f$ X\subsetneq V - /// \f$ with \f$ source \notin X \f$ and minimal out-degree). - /// The \c target is the initial - /// target for the push-relabel algorithm. - void calculateIn(const Node& target) { - findMinCut(target, false, *_in_res_graph); - } /// \brief Runs the algorithm. /// @@ -545,13 +912,8 @@ /// and \ref calculateIn(). void run() { init(); - for (NodeIt it(*_graph); it != INVALID; ++it) { - if (it != _source) { - calculateOut(it); - calculateIn(it); - return; - } - } + calculateOut(); + calculateIn(); } /// \brief Runs the algorithm. @@ -561,23 +923,8 @@ /// calculateOut() and \ref calculateIn(). void run(const Node& s) { init(s); - for (NodeIt it(*_graph); it != INVALID; ++it) { - if (it != _source) { - calculateOut(it); - calculateIn(it); - return; - } - } - } - - /// \brief Runs the algorithm. - /// - /// Runs the algorithm. It just calls the \ref init() and then - /// \ref calculateOut() and \ref calculateIn(). - void run(const Node& s, const Node& t) { - init(s); - calculateOut(t); - calculateIn(t); + calculateOut(); + calculateIn(); } /// @} @@ -608,7 +955,7 @@ /// bool-valued node-map. template Value minCut(NodeMap& nodeMap) const { - for (NodeIt it(*_graph); it != INVALID; ++it) { + for (NodeIt it(_graph); it != INVALID; ++it) { nodeMap.set(it, (*_min_cut_map)[it]); } return minCut(); diff -r 93de38566e6c -r f86f7e4eb2ba lemon/nagamochi_ibaraki.h --- a/lemon/nagamochi_ibaraki.h Fri Nov 30 09:22:38 2007 +0000 +++ b/lemon/nagamochi_ibaraki.h Tue Dec 04 10:55:27 2007 +0000 @@ -847,9 +847,9 @@ /// distinict subnetwork. /// /// The complexity of the algorithm is \f$ O(ne\log(n)) \f$ but with - /// Fibonacci heap it can be decreased to \f$ O(ne+n^2\log(n)) \f$. - /// When capacity map is neutral then it uses BucketHeap which - /// results \f$ O(ne) \f$ time complexity. + /// Fibonacci heap it can be decreased to \f$ O(ne+n^2\log(n)) \f$. + /// When unit capacity minimum cut is computed then it uses + /// BucketHeap which results \f$ O(ne) \f$ time complexity. /// /// \warning The value type of the capacity map should be able to hold /// any cut value of the graph, otherwise the result can overflow. @@ -906,7 +906,7 @@ ///@{ - struct DefNeutralCapacityTraits : public Traits { + struct DefUnitCapacityTraits : public Traits { typedef ConstMap > CapacityMap; static CapacityMap *createCapacityMap(const Graph&) { return new CapacityMap(); @@ -917,11 +917,11 @@ /// /// \ref named-templ-param "Named parameter" for setting /// the capacity type to constMap() - struct DefNeutralCapacity + struct DefUnitCapacity : public NagamochiIbaraki { + DefUnitCapacityTraits> { typedef NagamochiIbaraki Create; + DefUnitCapacityTraits> Create; }; @@ -1134,7 +1134,7 @@ /// /// This constructor can be used only when the Traits class /// defines how can we instantiate a local capacity map. 
- /// If the DefNeutralCapacity used the algorithm automatically - /// construct the capacity map. + /// If the DefUnitCapacity is used, the algorithm automatically + /// constructs the capacity map. /// ///\param graph the graph the algorithm will run on.
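The optimization problem stated in the new groups.dox text is small enough to sanity-check by exhaustive enumeration. A minimal sketch in plain C++ (no LEMON API is used; the three-node instance is made up for illustration):

#include <iostream>
#include <limits>

int main() {
  const int n = 3;                        // nodes 0 = s, 1 = a, 2 = b
  const int m = 3;
  const int tail[m] = {0, 1, 2};          // arcs s->a, a->b, b->s
  const int head[m] = {1, 2, 0};
  const int cap[m]  = {3, 2, 1};

  // Enumerate all non-empty, proper subsets X of the node set and take
  // the minimum total capacity on arcs leaving X.
  int best = std::numeric_limits<int>::max();
  for (int mask = 1; mask < (1 << n) - 1; ++mask) {
    int value = 0;
    for (int e = 0; e < m; ++e) {
      bool u_in = (mask >> tail[e]) & 1;
      bool v_in = (mask >> head[e]) & 1;
      if (u_in && !v_in) value += cap[e];  // arc leaving X
    }
    if (value < best) best = value;
  }
  std::cout << "minimum cut value: " << best << std::endl;  // prints 1 (e.g. X = {b})
  return 0;
}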
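The reworked HaoOrlin interface declared in this changeset (init(), calculateOut(), calculateIn(), run(), minCut()) can be exercised on the same instance. This is a hedged sketch: only the HaoOrlin calls come from the patch; ListGraph, addNode()/addEdge(), EdgeMap/NodeMap and id() are assumed to follow the usual LEMON conventions of this period and are not part of the changeset.

#include <iostream>
#include <lemon/list_graph.h>
#include <lemon/hao_orlin.h>

int main() {
  typedef lemon::ListGraph Graph;
  Graph g;

  // Same three-node instance as above: s->a (3), a->b (2), b->s (1);
  // the global minimum cut value is 1 (e.g. X = {b}).
  Graph::Node s = g.addNode();
  Graph::Node a = g.addNode();
  Graph::Node b = g.addNode();

  Graph::EdgeMap<int> capacity(g);
  capacity[g.addEdge(s, a)] = 3;
  capacity[g.addEdge(a, b)] = 2;
  capacity[g.addEdge(b, s)] = 1;

  // Fix s as the source node and run both phases (source-side and sink-side).
  lemon::HaoOrlin<Graph, Graph::EdgeMap<int> > ho(g, capacity);
  ho.run(s);

  // minCut() returns the cut value; minCut(map) additionally marks one side.
  Graph::NodeMap<bool> cut(g);
  std::cout << "minimum cut value: " << ho.minCut(cut) << std::endl;
  for (Graph::NodeIt n(g); n != lemon::INVALID; ++n) {
    std::cout << "node " << g.id(n) << (cut[n] ? ": marked" : ": unmarked")
              << std::endl;
  }
  return 0;
}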
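Much of the new implementation is bucket bookkeeping: every node sits in a doubly-linked list belonging to its bucket, activate() hoists a node to the front of that list and deactivate() pushes it to the back, so _first[bucket] yields an active node in O(1) whenever the bucket has one. The following standalone model illustrates the idea only; it is simplified and index-based, not the LEMON code, and the early-return conditions of the real activate()/deactivate() are reduced to a front/back check.

#include <cassert>
#include <vector>

const int NIL = -1;  // stands in for INVALID

struct Buckets {
  std::vector<int> first, last;         // per-bucket list ends
  std::vector<int> prev, next, bucket;  // per-node links and bucket index
  std::vector<bool> active;

  Buckets(int nodes, int buckets)
    : first(buckets, NIL), last(buckets, NIL),
      prev(nodes, NIL), next(nodes, NIL), bucket(nodes, 0),
      active(nodes, false) {}

  // Append node i to the back of bucket b (mirrors addItem()).
  void addItem(int i, int b) {
    bucket[i] = b;
    prev[i] = last[b];
    next[i] = NIL;
    if (last[b] != NIL) next[last[b]] = i; else first[b] = i;
    last[b] = i;
  }

  // Remove node i from its bucket's list, fixing both neighbours.
  void unlace(int i) {
    int b = bucket[i];
    if (prev[i] != NIL) next[prev[i]] = next[i]; else first[b] = next[i];
    if (next[i] != NIL) prev[next[i]] = prev[i]; else last[b] = prev[i];
  }

  // Mark i active and move it to the front of its bucket (cf. activate()).
  void activate(int i) {
    active[i] = true;
    int b = bucket[i];
    if (first[b] == i) return;
    unlace(i);
    prev[i] = NIL;
    next[i] = first[b];
    prev[first[b]] = i;
    first[b] = i;
  }

  // Mark i inactive and move it to the back of its bucket (cf. deactivate()).
  void deactivate(int i) {
    active[i] = false;
    int b = bucket[i];
    if (last[b] == i) return;
    unlace(i);
    next[i] = NIL;
    prev[i] = last[b];
    next[last[b]] = i;
    last[b] = i;
  }
};

int main() {
  Buckets B(3, 1);
  B.addItem(0, 0); B.addItem(1, 0); B.addItem(2, 0);  // bucket 0 list: 0, 1, 2

  B.activate(2);                                      // list becomes: 2, 0, 1
  assert(B.first[0] == 2 && B.active[B.first[0]]);    // front node is active

  B.deactivate(2);                                    // list becomes: 0, 1, 2
  assert(!B.active[B.first[0]]);                      // no active node left in the bucket
  return 0;
}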