alpar (Alpar Juttner)
alpar@cs.elte.hu
Doc improvements in HaoOrlin (#398)
default
1 file changed with 21 insertions and 11 deletions:
1 1
/* -*- mode: C++; indent-tabs-mode: nil; -*-
2 2
 *
3 3
 * This file is a part of LEMON, a generic C++ optimization library.
4 4
 *
5 5
 * Copyright (C) 2003-2010
6 6
 * Egervary Jeno Kombinatorikus Optimalizalasi Kutatocsoport
7 7
 * (Egervary Research Group on Combinatorial Optimization, EGRES).
8 8
 *
9 9
 * Permission to use, modify and distribute this software is granted
10 10
 * provided that this copyright notice appears in all copies. For
11 11
 * precise terms see the accompanying LICENSE file.
12 12
 *
13 13
 * This software is provided "AS IS" with no warranty of any kind,
14 14
 * express or implied, and with no claim as to its suitability for any
15 15
 * purpose.
16 16
 *
17 17
 */
18 18

	
19 19
#ifndef LEMON_HAO_ORLIN_H
20 20
#define LEMON_HAO_ORLIN_H
21 21

	
22 22
#include <vector>
23 23
#include <list>
24 24
#include <limits>
25 25

	
26 26
#include <lemon/maps.h>
27 27
#include <lemon/core.h>
28 28
#include <lemon/tolerance.h>
29 29

	
30 30
/// \file
31 31
/// \ingroup min_cut
32 32
/// \brief Implementation of the Hao-Orlin algorithm.
33 33
///
34 34
/// Implementation of the Hao-Orlin algorithm for finding a minimum cut
35 35
/// in a digraph.
36 36

	
37 37
namespace lemon {
38 38

	
39 39
  /// \ingroup min_cut
40 40
  ///
41 41
  /// \brief Hao-Orlin algorithm for finding a minimum cut in a digraph.
42 42
  ///
43 43
  /// This class implements the Hao-Orlin algorithm for finding a minimum
44 44
  /// value cut in a directed graph \f$D=(V,A)\f$.
45 45
  /// It takes a fixed node \f$ source \in V \f$ and
46 46
  /// consists of two phases: in the first phase it determines a
47 47
  /// minimum cut with \f$ source \f$ on the source-side (i.e. a set
48 48
  /// \f$ X\subsetneq V \f$ with \f$ source \in X \f$ and minimal outgoing
49 49
  /// capacity) and in the second phase it determines a minimum cut
50 50
  /// with \f$ source \f$ on the sink-side (i.e. a set
51 51
  /// \f$ X\subsetneq V \f$ with \f$ source \notin X \f$ and minimal outgoing
52 52
  /// capacity). Obviously, the smaller of these two cuts will be a
53 53
  /// minimum cut of \f$ D \f$. The algorithm is a modified
54 54
  /// preflow push-relabel algorithm. Our implementation calculates
55 55
  /// the minimum cut in \f$ O(n^2\sqrt{m}) \f$ time (we use the
56
  /// highest-label rule), or in \f$O(nm)\f$ for unit capacities. The
57
  /// purpose of such algorithm is e.g. testing network reliability.
56
  /// highest-label rule), or in \f$O(nm)\f$ for unit capacities. A notable
57
  /// use of this algorithm is testing network reliability.
58 58
  ///
59 59
  /// For an undirected graph you can run just the first phase of the
60 60
  /// algorithm or you can use the algorithm of Nagamochi and Ibaraki,
61 61
  /// which solves the undirected problem in \f$ O(nm + n^2 \log n) \f$
62 62
  /// time. It is implemented in the NagamochiIbaraki algorithm class.
63 63
  ///
64 64
  /// \tparam GR The type of the digraph the algorithm runs on.
65 65
  /// \tparam CAP The type of the arc map containing the capacities,
66 66
  /// which can be any numeric type. The default map type is
67 67
  /// \ref concepts::Digraph::ArcMap "GR::ArcMap<int>".
68 68
  /// \tparam TOL Tolerance class for handling inexact computations. The
69 69
  /// default tolerance type is \ref Tolerance "Tolerance<CAP::Value>".
70 70
#ifdef DOXYGEN
71 71
  template <typename GR, typename CAP, typename TOL>
72 72
#else
73 73
  template <typename GR,
74 74
            typename CAP = typename GR::template ArcMap<int>,
75 75
            typename TOL = Tolerance<typename CAP::Value> >
76 76
#endif
77 77
  class HaoOrlin {
78 78
  public:
79 79

	
80 80
    /// The digraph type of the algorithm
81 81
    typedef GR Digraph;
82 82
    /// The capacity map type of the algorithm
83 83
    typedef CAP CapacityMap;
84 84
    /// The tolerance type of the algorithm
85 85
    typedef TOL Tolerance;
86 86

	
87 87
  private:
88 88

	
89 89
    typedef typename CapacityMap::Value Value;
90 90

	
91 91
    TEMPLATE_DIGRAPH_TYPEDEFS(Digraph);
92 92

	
93 93
    const Digraph& _graph;
94 94
    const CapacityMap* _capacity;
95 95

	
96 96
    typedef typename Digraph::template ArcMap<Value> FlowMap;
97 97
    FlowMap* _flow;
98 98

	
99 99
    Node _source;
100 100

	
101 101
    int _node_num;
102 102

	
103 103
    // Bucketing structure
104 104
    std::vector<Node> _first, _last;
105 105
    typename Digraph::template NodeMap<Node>* _next;
106 106
    typename Digraph::template NodeMap<Node>* _prev;
107 107
    typename Digraph::template NodeMap<bool>* _active;
108 108
    typename Digraph::template NodeMap<int>* _bucket;
109 109

	
110 110
    std::vector<bool> _dormant;
111 111

	
112 112
    std::list<std::list<int> > _sets;
113 113
    std::list<int>::iterator _highest;
114 114

	
115 115
    typedef typename Digraph::template NodeMap<Value> ExcessMap;
116 116
    ExcessMap* _excess;
117 117

	
118 118
    typedef typename Digraph::template NodeMap<bool> SourceSetMap;
119 119
    SourceSetMap* _source_set;
120 120

	
121 121
    Value _min_cut;
122 122

	
123 123
    typedef typename Digraph::template NodeMap<bool> MinCutMap;
124 124
    MinCutMap* _min_cut_map;
125 125

	
126 126
    Tolerance _tolerance;
127 127

	
128 128
  public:
129 129

	
130 130
    /// \brief Constructor
131 131
    ///
132 132
    /// Constructor of the algorithm class.
133 133
    HaoOrlin(const Digraph& graph, const CapacityMap& capacity,
134 134
             const Tolerance& tolerance = Tolerance()) :
135 135
      _graph(graph), _capacity(&capacity), _flow(0), _source(),
136 136
      _node_num(), _first(), _last(), _next(0), _prev(0),
137 137
      _active(0), _bucket(0), _dormant(), _sets(), _highest(),
138 138
      _excess(0), _source_set(0), _min_cut(), _min_cut_map(0),
139 139
      _tolerance(tolerance) {}
140 140

	
141 141
    ~HaoOrlin() {
142 142
      if (_min_cut_map) {
143 143
        delete _min_cut_map;
144 144
      }
145 145
      if (_source_set) {
146 146
        delete _source_set;
147 147
      }
148 148
      if (_excess) {
149 149
        delete _excess;
150 150
      }
151 151
      if (_next) {
152 152
        delete _next;
153 153
      }
154 154
      if (_prev) {
155 155
        delete _prev;
156 156
      }
157 157
      if (_active) {
158 158
        delete _active;
159 159
      }
160 160
      if (_bucket) {
161 161
        delete _bucket;
162 162
      }
163 163
      if (_flow) {
164 164
        delete _flow;
165 165
      }
166 166
    }
167 167

	
168 168
    /// \brief Set the tolerance used by the algorithm.
169 169
    ///
170 170
    /// This function sets the tolerance object used by the algorithm.
171 171
    /// \return <tt>(*this)</tt>
172 172
    HaoOrlin& tolerance(const Tolerance& tolerance) {
173 173
      _tolerance = tolerance;
174 174
      return *this;
175 175
    }
176 176

	
177 177
    /// \brief Returns a const reference to the tolerance.
178 178
    ///
179 179
    /// This function returns a const reference to the tolerance object
180 180
    /// used by the algorithm.
181 181
    const Tolerance& tolerance() const {
182 182
      return _tolerance;
183 183
    }
184 184

	
185 185
  private:
186 186

	
187 187
    void activate(const Node& i) {
188 188
      (*_active)[i] = true;
189 189

	
190 190
      int bucket = (*_bucket)[i];
191 191

	
192 192
      if ((*_prev)[i] == INVALID || (*_active)[(*_prev)[i]]) return;
193 193
      //unlace
194 194
      (*_next)[(*_prev)[i]] = (*_next)[i];
195 195
      if ((*_next)[i] != INVALID) {
196 196
        (*_prev)[(*_next)[i]] = (*_prev)[i];
197 197
      } else {
198 198
        _last[bucket] = (*_prev)[i];
199 199
      }
200 200
      //lace
201 201
      (*_next)[i] = _first[bucket];
202 202
      (*_prev)[_first[bucket]] = i;
203 203
      (*_prev)[i] = INVALID;
204 204
      _first[bucket] = i;
205 205
    }
206 206

	
207 207
    void deactivate(const Node& i) {
208 208
      (*_active)[i] = false;
209 209
      int bucket = (*_bucket)[i];
210 210

	
211 211
      if ((*_next)[i] == INVALID || !(*_active)[(*_next)[i]]) return;
212 212

	
213 213
      //unlace
214 214
      (*_prev)[(*_next)[i]] = (*_prev)[i];
215 215
      if ((*_prev)[i] != INVALID) {
216 216
        (*_next)[(*_prev)[i]] = (*_next)[i];
217 217
      } else {
218 218
        _first[bucket] = (*_next)[i];
219 219
      }
220 220
      //lace
221 221
      (*_prev)[i] = _last[bucket];
222 222
      (*_next)[_last[bucket]] = i;
223 223
      (*_next)[i] = INVALID;
224 224
      _last[bucket] = i;
225 225
    }
226 226

	
227 227
    void addItem(const Node& i, int bucket) {
228 228
      (*_bucket)[i] = bucket;
229 229
      if (_last[bucket] != INVALID) {
230 230
        (*_prev)[i] = _last[bucket];
231 231
        (*_next)[_last[bucket]] = i;
232 232
        (*_next)[i] = INVALID;
233 233
        _last[bucket] = i;
234 234
      } else {
235 235
        (*_prev)[i] = INVALID;
236 236
        _first[bucket] = i;
237 237
        (*_next)[i] = INVALID;
238 238
        _last[bucket] = i;
239 239
      }
240 240
    }
241 241

	
242 242
    void findMinCutOut() {
243 243

	
244 244
      for (NodeIt n(_graph); n != INVALID; ++n) {
245 245
        (*_excess)[n] = 0;
246 246
        (*_source_set)[n] = false;
247 247
      }
248 248

	
249 249
      for (ArcIt a(_graph); a != INVALID; ++a) {
... ...
@@ -723,283 +723,293 @@
723 723
              _first[*_highest] = (*_next)[n];
724 724
              (*_prev)[(*_next)[n]] = INVALID;
725 725

	
726 726
              while (next_bucket != *_highest) {
727 727
                --_highest;
728 728
              }
729 729
              if (_highest == _sets.back().begin()) {
730 730
                _sets.back().push_front(bucket_num);
731 731
                _dormant[bucket_num] = false;
732 732
                _first[bucket_num] = _last[bucket_num] = INVALID;
733 733
                ++bucket_num;
734 734
              }
735 735
              --_highest;
736 736

	
737 737
              (*_bucket)[n] = *_highest;
738 738
              (*_next)[n] = _first[*_highest];
739 739
              if (_first[*_highest] != INVALID) {
740 740
                (*_prev)[_first[*_highest]] = n;
741 741
              } else {
742 742
                _last[*_highest] = n;
743 743
              }
744 744
              _first[*_highest] = n;
745 745
            }
746 746
          } else {
747 747

	
748 748
            deactivate(n);
749 749
            if (!(*_active)[_first[*_highest]]) {
750 750
              ++_highest;
751 751
              if (_highest != _sets.back().end() &&
752 752
                  !(*_active)[_first[*_highest]]) {
753 753
                _highest = _sets.back().end();
754 754
              }
755 755
            }
756 756
          }
757 757
        }
758 758

	
759 759
        if ((*_excess)[target] < _min_cut) {
760 760
          _min_cut = (*_excess)[target];
761 761
          for (NodeIt i(_graph); i != INVALID; ++i) {
762 762
            (*_min_cut_map)[i] = false;
763 763
          }
764 764
          for (std::list<int>::iterator it = _sets.back().begin();
765 765
               it != _sets.back().end(); ++it) {
766 766
            Node n = _first[*it];
767 767
            while (n != INVALID) {
768 768
              (*_min_cut_map)[n] = true;
769 769
              n = (*_next)[n];
770 770
            }
771 771
          }
772 772
        }
773 773

	
774 774
        {
775 775
          Node new_target;
776 776
          if ((*_prev)[target] != INVALID || (*_next)[target] != INVALID) {
777 777
            if ((*_next)[target] == INVALID) {
778 778
              _last[(*_bucket)[target]] = (*_prev)[target];
779 779
              new_target = (*_prev)[target];
780 780
            } else {
781 781
              (*_prev)[(*_next)[target]] = (*_prev)[target];
782 782
              new_target = (*_next)[target];
783 783
            }
784 784
            if ((*_prev)[target] == INVALID) {
785 785
              _first[(*_bucket)[target]] = (*_next)[target];
786 786
            } else {
787 787
              (*_next)[(*_prev)[target]] = (*_next)[target];
788 788
            }
789 789
          } else {
790 790
            _sets.back().pop_back();
791 791
            if (_sets.back().empty()) {
792 792
              _sets.pop_back();
793 793
              if (_sets.empty())
794 794
                break;
795 795
              for (std::list<int>::iterator it = _sets.back().begin();
796 796
                   it != _sets.back().end(); ++it) {
797 797
                _dormant[*it] = false;
798 798
              }
799 799
            }
800 800
            new_target = _last[_sets.back().back()];
801 801
          }
802 802

	
803 803
          (*_bucket)[target] = 0;
804 804

	
805 805
          (*_source_set)[target] = true;
806 806
          for (InArcIt a(_graph, target); a != INVALID; ++a) {
807 807
            Value rem = (*_capacity)[a] - (*_flow)[a];
808 808
            if (!_tolerance.positive(rem)) continue;
809 809
            Node v = _graph.source(a);
810 810
            if (!(*_active)[v] && !(*_source_set)[v]) {
811 811
              activate(v);
812 812
            }
813 813
            (*_excess)[v] += rem;
814 814
            (*_flow)[a] = (*_capacity)[a];
815 815
          }
816 816

	
817 817
          for (OutArcIt a(_graph, target); a != INVALID; ++a) {
818 818
            Value rem = (*_flow)[a];
819 819
            if (!_tolerance.positive(rem)) continue;
820 820
            Node v = _graph.target(a);
821 821
            if (!(*_active)[v] && !(*_source_set)[v]) {
822 822
              activate(v);
823 823
            }
824 824
            (*_excess)[v] += rem;
825 825
            (*_flow)[a] = 0;
826 826
          }
827 827

	
828 828
          target = new_target;
829 829
          if ((*_active)[target]) {
830 830
            deactivate(target);
831 831
          }
832 832

	
833 833
          _highest = _sets.back().begin();
834 834
          while (_highest != _sets.back().end() &&
835 835
                 !(*_active)[_first[*_highest]]) {
836 836
            ++_highest;
837 837
          }
838 838
        }
839 839
      }
840 840
    }
841 841

	
842 842
  public:
843 843

	
844 844
    /// \name Execution Control
845 845
    /// The simplest way to execute the algorithm is to use
846 846
    /// one of the member functions called \ref run().
847 847
    /// \n
848 848
    /// If you need better control over the execution,
849 849
    /// you have to call one of the \ref init() functions first, then
850 850
    /// \ref calculateOut() and/or \ref calculateIn().
851 851

	
852 852
    /// @{
853 853

	
854 854
    /// \brief Initialize the internal data structures.
855 855
    ///
856 856
    /// This function initializes the internal data structures. It creates
857 857
    /// the maps and some bucket structures for the algorithm.
858 858
    /// The first node is used as the source node for the push-relabel
859 859
    /// algorithm.
860 860
    void init() {
861 861
      init(NodeIt(_graph));
862 862
    }
863 863

	
864 864
    /// \brief Initialize the internal data structures.
865 865
    ///
866 866
    /// This function initializes the internal data structures. It creates
867 867
    /// the maps and some bucket structures for the algorithm.
868 868
    /// The given node is used as the source node for the push-relabel
869 869
    /// algorithm.
870 870
    void init(const Node& source) {
871 871
      _source = source;
872 872

	
873 873
      _node_num = countNodes(_graph);
874 874

	
875 875
      _first.resize(_node_num);
876 876
      _last.resize(_node_num);
877 877

	
878 878
      _dormant.resize(_node_num);
879 879

	
880 880
      if (!_flow) {
881 881
        _flow = new FlowMap(_graph);
882 882
      }
883 883
      if (!_next) {
884 884
        _next = new typename Digraph::template NodeMap<Node>(_graph);
885 885
      }
886 886
      if (!_prev) {
887 887
        _prev = new typename Digraph::template NodeMap<Node>(_graph);
888 888
      }
889 889
      if (!_active) {
890 890
        _active = new typename Digraph::template NodeMap<bool>(_graph);
891 891
      }
892 892
      if (!_bucket) {
893 893
        _bucket = new typename Digraph::template NodeMap<int>(_graph);
894 894
      }
895 895
      if (!_excess) {
896 896
        _excess = new ExcessMap(_graph);
897 897
      }
898 898
      if (!_source_set) {
899 899
        _source_set = new SourceSetMap(_graph);
900 900
      }
901 901
      if (!_min_cut_map) {
902 902
        _min_cut_map = new MinCutMap(_graph);
903 903
      }
904 904

	
905 905
      _min_cut = std::numeric_limits<Value>::max();
906 906
    }
907 907

	
908 908

	
909 909
    /// \brief Calculate a minimum cut with \f$ source \f$ on the
910 910
    /// source-side.
911 911
    ///
912 912
    /// This function calculates a minimum cut with \f$ source \f$ on the
913 913
    /// source-side (i.e. a set \f$ X\subsetneq V \f$ with
914 914
    /// \f$ source \in X \f$ and minimal outgoing capacity).
915
    /// It updates the stored cut if (and only if) the newly found one
916
    /// is better.
915 917
    ///
916 918
    /// \pre \ref init() must be called before using this function.
917 919
    void calculateOut() {
918 920
      findMinCutOut();
919 921
    }
920 922

	
921 923
    /// \brief Calculate a minimum cut with \f$ source \f$ on the
922 924
    /// sink-side.
923 925
    ///
924 926
    /// This function calculates a minimum cut with \f$ source \f$ on the
925 927
    /// sink-side (i.e. a set \f$ X\subsetneq V \f$ with
926 928
    /// \f$ source \notin X \f$ and minimal outgoing capacity).
929
    /// It updates the stored cut if (and only if) the newly found one
930
    /// is better.
927 931
    ///
928 932
    /// \pre \ref init() must be called before using this function.
929 933
    void calculateIn() {
930 934
      findMinCutIn();
931 935
    }
932 936

	
933 937

	
934 938
    /// \brief Run the algorithm.
935 939
    ///
936
    /// This function runs the algorithm. It finds nodes \c source and
937
    /// \c target arbitrarily and then calls \ref init(), \ref calculateOut()
940
    /// This function runs the algorithm. It chooses a source node,
941
    /// then calls \ref init(), \ref calculateOut()
938 942
    /// and \ref calculateIn().
939 943
    void run() {
940 944
      init();
941 945
      calculateOut();
942 946
      calculateIn();
943 947
    }
944 948

	
945 949
    /// \brief Run the algorithm.
946 950
    ///
947
    /// This function runs the algorithm. It uses the given \c source node,
948
    /// finds a proper \c target node and then calls the \ref init(),
949
    /// \ref calculateOut() and \ref calculateIn().
951
    /// This function runs the algorithm. It calls \ref init(),
952
    /// \ref calculateOut() and \ref calculateIn() with the given
953
    /// source node.
950 954
    void run(const Node& s) {
951 955
      init(s);
952 956
      calculateOut();
953 957
      calculateIn();
954 958
    }
955 959

	
956 960
    /// @}
957 961

	
958 962
    /// \name Query Functions
959 963
    /// The result of the %HaoOrlin algorithm
960 964
    /// can be obtained using these functions.\n
961 965
    /// \ref run(), \ref calculateOut() or \ref calculateIn()
962 966
    /// should be called before using them.
963 967

	
964 968
    /// @{
965 969

	
966 970
    /// \brief Return the value of the minimum cut.
967 971
    ///
968
    /// This function returns the value of the minimum cut.
972
    /// This function returns the value of the best cut found by the
973
    /// previously called \ref run(), \ref calculateOut() or \ref
974
    /// calculateIn().
969 975
    ///
970 976
    /// \pre \ref run(), \ref calculateOut() or \ref calculateIn()
971 977
    /// must be called before using this function.
972 978
    Value minCutValue() const {
973 979
      return _min_cut;
974 980
    }
975 981

	
976 982

	
977 983
    /// \brief Return a minimum cut.
978 984
    ///
979
    /// This function sets \c cutMap to the characteristic vector of a
980
    /// minimum value cut: it will give a non-empty set \f$ X\subsetneq V \f$
981
    /// with minimal outgoing capacity (i.e. \c cutMap will be \c true exactly
985
    /// This function gives the best cut found by the
986
    /// previously called \ref run(), \ref calculateOut() or \ref
987
    /// calculateIn().
988
    ///
989
    /// It sets \c cutMap to the characteristic vector of the found
990
    /// minimum value cut - a non-empty set \f$ X\subsetneq V \f$
991
    /// of minimum outgoing capacity (i.e. \c cutMap will be \c true exactly
982 992
    /// for the nodes of \f$ X \f$).
983 993
    ///
984 994
    /// \param cutMap A \ref concepts::WriteMap "writable" node map with
985 995
    /// \c bool (or convertible) value type.
986 996
    ///
987 997
    /// \return The value of the minimum cut.
988 998
    ///
989 999
    /// \pre \ref run(), \ref calculateOut() or \ref calculateIn()
990 1000
    /// must be called before using this function.
991 1001
    template <typename CutMap>
992 1002
    Value minCutMap(CutMap& cutMap) const {
993 1003
      for (NodeIt it(_graph); it != INVALID; ++it) {
994 1004
        cutMap.set(it, (*_min_cut_map)[it]);
995 1005
      }
996 1006
      return _min_cut;
997 1007
    }
998 1008

	
999 1009
    /// @}
1000 1010

	
1001 1011
  }; //class HaoOrlin
1002 1012

	
1003 1013
} //namespace lemon
1004 1014

	
1005 1015
#endif //LEMON_HAO_ORLIN_H
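
To make the documented interface concrete, here is a minimal usage sketch (not part of the patch; the toy digraph, its nodes and its capacities are invented for illustration). It runs the whole algorithm with run(), then shows the finer-grained init()/calculateOut() path and the minCutValue()/minCutMap() queries described in the comments above.

#include <iostream>
#include <lemon/list_graph.h>
#include <lemon/hao_orlin.h>

int main() {
  using namespace lemon;

  // Build a small digraph with integer arc capacities (values are arbitrary).
  ListDigraph g;
  ListDigraph::Node a = g.addNode();
  ListDigraph::Node b = g.addNode();
  ListDigraph::Node c = g.addNode();
  ListDigraph::ArcMap<int> cap(g);
  cap[g.addArc(a, b)] = 3;
  cap[g.addArc(b, c)] = 1;
  cap[g.addArc(c, a)] = 2;

  // Simplest use: run() picks a source node and performs both phases.
  HaoOrlin<ListDigraph> ho(g, cap);
  ho.run();
  std::cout << "min cut value: " << ho.minCutValue() << std::endl;

  // Finer control: init() with a chosen source node, then only the
  // source-side phase.
  ho.init(a);
  ho.calculateOut();

  // Retrieve the best cut found as a characteristic node map.
  ListDigraph::NodeMap<bool> cut(g);
  int value = ho.minCutMap(cut);
  std::cout << "cut value: " << value << std::endl;
  for (ListDigraph::NodeIt n(g); n != INVALID; ++n) {
    std::cout << "node " << g.id(n) << (cut[n] ? " in X" : " not in X")
              << std::endl;
  }
  return 0;
}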