/* -*- mode: C++; indent-tabs-mode: nil; -*-
 *
 * This file is a part of LEMON, a generic C++ optimization library.
 *
 * Copyright (C) 2003-2009
 * Egervary Jeno Kombinatorikus Optimalizalasi Kutatocsoport
 * (Egervary Research Group on Combinatorial Optimization, EGRES).
 *
 * Permission to use, modify and distribute this software is granted
 * provided that this copyright notice appears in all copies. For
 * precise terms see the accompanying LICENSE file.
 *
 * This software is provided "AS IS" with no warranty of any kind,
 * express or implied, and with no claim as to its suitability for any
 * purpose.
 *
 */

#ifndef LEMON_CIRCULATION_H
#define LEMON_CIRCULATION_H

#include <lemon/tolerance.h>
#include <lemon/elevator.h>

///\ingroup max_flow
///\file
///\brief Push-relabel algorithm for finding a feasible circulation.
///
namespace lemon {

  /// \brief Default traits class of Circulation class.
  ///
  /// Default traits class of Circulation class.
  /// \tparam GR Digraph type.
  /// \tparam LM Lower bound capacity map type.
  /// \tparam UM Upper bound capacity map type.
  /// \tparam DM Delta map type.
  template <typename GR, typename LM,
            typename UM, typename DM>
  struct CirculationDefaultTraits {

    /// \brief The type of the digraph the algorithm runs on.
    typedef GR Digraph;

    /// \brief The type of the map that stores the circulation lower
    /// bound.
    ///
    /// The type of the map that stores the circulation lower bound.
    /// It must meet the \ref concepts::ReadMap "ReadMap" concept.
    typedef LM LCapMap;

    /// \brief The type of the map that stores the circulation upper
    /// bound.
    ///
    /// The type of the map that stores the circulation upper bound.
    /// It must meet the \ref concepts::ReadMap "ReadMap" concept.
    typedef UM UCapMap;

    /// \brief The type of the map that stores the lower bound for
    /// the supply of the nodes.
    ///
    /// The type of the map that stores the lower bound for the supply
    /// of the nodes. It must meet the \ref concepts::ReadMap "ReadMap"
    /// concept.
    typedef DM DeltaMap;

    /// \brief The type of the flow values.
    typedef typename DeltaMap::Value Value;

    /// \brief The type of the map that stores the flow values.
    ///
    /// The type of the map that stores the flow values.
    /// It must meet the \ref concepts::ReadWriteMap "ReadWriteMap" concept.
    typedef typename Digraph::template ArcMap<Value> FlowMap;

    /// \brief Instantiates a FlowMap.
    ///
    /// This function instantiates a \ref FlowMap.
    /// \param digraph The digraph, to which we would like to define
    /// the flow map.
    static FlowMap* createFlowMap(const Digraph& digraph) {
      return new FlowMap(digraph);
    }

    /// \brief The elevator type used by the algorithm.
    ///
    /// The elevator type used by the algorithm.
    ///
    /// \sa Elevator
    /// \sa LinkedElevator
    typedef lemon::Elevator<Digraph, typename Digraph::Node> Elevator;

    /// \brief Instantiates an Elevator.
    ///
    /// This function instantiates an \ref Elevator.
    /// \param digraph The digraph, to which we would like to define
    /// the elevator.
    /// \param max_level The maximum level of the elevator.
    static Elevator* createElevator(const Digraph& digraph, int max_level) {
      return new Elevator(digraph, max_level);
    }

    /// \brief The tolerance used by the algorithm
    ///
    /// The tolerance used by the algorithm to handle inexact computation.
    typedef lemon::Tolerance<Value> Tolerance;

  };
  /**
     \brief Push-relabel algorithm for the network circulation problem.

     \ingroup max_flow
     This class implements a push-relabel algorithm for the network
     circulation problem.
     It finds a feasible circulation when lower and upper bounds
     are given for the flow values on the arcs and lower bounds
     are given for the supply values of the nodes.

     The exact formulation of this problem is the following.
     Let \f$G=(V,A)\f$ be a digraph,
     \f$lower, upper: A\rightarrow\mathbf{R}^+_0\f$,
     \f$delta: V\rightarrow\mathbf{R}\f$. Find a feasible circulation
     \f$f: A\rightarrow\mathbf{R}^+_0\f$ so that
     \f[ \sum_{a\in\delta_{out}(v)} f(a) - \sum_{a\in\delta_{in}(v)} f(a)
     \geq delta(v) \quad \forall v\in V, \f]
     \f[ lower(a)\leq f(a) \leq upper(a) \quad \forall a\in A. \f]
     \note \f$delta(v)\f$ specifies a lower bound for the supply of node
     \f$v\f$. It can be either positive or negative; note, however, that
     \f$\sum_{v\in V}delta(v)\f$ should be zero or negative in order to
     have a feasible solution.

     \note A special case of this problem is when
     \f$\sum_{v\in V}delta(v) = 0\f$. Then the supply of each node \f$v\f$
     will be \e equal \e to \f$delta(v)\f$, if a circulation can be found.
     Thus a feasible solution for the
     \ref min_cost_flow "minimum cost flow" problem can be calculated
     in this way.

     \tparam GR The type of the digraph the algorithm runs on.
     \tparam LM The type of the lower bound capacity map. The default
     map type is \ref concepts::Digraph::ArcMap "GR::ArcMap<int>".
     \tparam UM The type of the upper bound capacity map. The default
     map type is \c LM.
     \tparam DM The type of the map that stores the lower bound
     for the supply of the nodes. The default map type is
     \ref concepts::Digraph::NodeMap "GR::NodeMap<UM::Value>".
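
     As a quick illustration (the digraph \c g and the map names below
     are only examples, not part of the interface), a feasible
     circulation can be searched for like this:
     \code
       ListDigraph g;
       ListDigraph::ArcMap<int> lo(g), up(g);
       ListDigraph::NodeMap<int> delta(g);
       // ... build the digraph and fill the bound maps ...
       Circulation<ListDigraph> circ(g, lo, up, delta);
       bool feasible = circ.run();
     \endcode
     If \c run() returns \c false, the barrier query functions below can
     be used to obtain a certificate of infeasibility.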
  */
#ifdef DOXYGEN
  template< typename GR,
            typename LM,
            typename UM,
            typename DM,
            typename TR >
#else
  template< typename GR,
            typename LM = typename GR::template ArcMap<int>,
            typename UM = LM,
            typename DM = typename GR::template NodeMap<typename UM::Value>,
            typename TR = CirculationDefaultTraits<GR, LM, UM, DM> >
#endif
  class Circulation {
  public:

    ///The \ref CirculationDefaultTraits "traits class" of the algorithm.
    typedef TR Traits;
    ///The type of the digraph the algorithm runs on.
    typedef typename Traits::Digraph Digraph;
    ///The type of the flow values.
    typedef typename Traits::Value Value;

    /// The type of the lower bound capacity map.
    typedef typename Traits::LCapMap LCapMap;
    /// The type of the upper bound capacity map.
    typedef typename Traits::UCapMap UCapMap;
    /// \brief The type of the map that stores the lower bound for
    /// the supply of the nodes.
    typedef typename Traits::DeltaMap DeltaMap;
    ///The type of the flow map.
    typedef typename Traits::FlowMap FlowMap;

    ///The type of the elevator.
    typedef typename Traits::Elevator Elevator;
    ///The type of the tolerance.
    typedef typename Traits::Tolerance Tolerance;

  private:

    TEMPLATE_DIGRAPH_TYPEDEFS(Digraph);

    const Digraph &_g;
    int _node_num;

    const LCapMap *_lo;
    const UCapMap *_up;
    const DeltaMap *_delta;

    FlowMap *_flow;
    bool _local_flow;

    Elevator* _level;
    bool _local_level;

    typedef typename Digraph::template NodeMap<Value> ExcessMap;
    ExcessMap* _excess;

    Tolerance _tol;
    int _el;

  public:

    typedef Circulation Create;

    ///\name Named Template Parameters

    ///@{

    template <typename T>
    struct SetFlowMapTraits : public Traits {
      typedef T FlowMap;
      static FlowMap *createFlowMap(const Digraph&) {
        LEMON_ASSERT(false, "FlowMap is not initialized");
        return 0; // ignore warnings
      }
    };

    /// \brief \ref named-templ-param "Named parameter" for setting
    /// FlowMap type
    ///
    /// \ref named-templ-param "Named parameter" for setting FlowMap
    /// type.
    template <typename T>
    struct SetFlowMap
      : public Circulation<Digraph, LCapMap, UCapMap, DeltaMap,
                           SetFlowMapTraits<T> > {
      typedef Circulation<Digraph, LCapMap, UCapMap, DeltaMap,
                          SetFlowMapTraits<T> > Create;
    };

    template <typename T>
    struct SetElevatorTraits : public Traits {
      typedef T Elevator;
      static Elevator *createElevator(const Digraph&, int) {
        LEMON_ASSERT(false, "Elevator is not initialized");
        return 0; // ignore warnings
      }
    };

    /// \brief \ref named-templ-param "Named parameter" for setting
    /// Elevator type
    ///
    /// \ref named-templ-param "Named parameter" for setting Elevator
    /// type. If this named parameter is used, then an external
    /// elevator object must be passed to the algorithm using the
    /// \ref elevator(Elevator&) "elevator()" function before calling
    /// \ref run() or \ref init().
    /// \sa SetStandardElevator
    template <typename T>
    struct SetElevator
      : public Circulation<Digraph, LCapMap, UCapMap, DeltaMap,
                           SetElevatorTraits<T> > {
      typedef Circulation<Digraph, LCapMap, UCapMap, DeltaMap,
                          SetElevatorTraits<T> > Create;
    };

    template <typename T>
    struct SetStandardElevatorTraits : public Traits {
      typedef T Elevator;
      static Elevator *createElevator(const Digraph& digraph, int max_level) {
        return new Elevator(digraph, max_level);
      }
    };

    /// \brief \ref named-templ-param "Named parameter" for setting
    /// Elevator type with automatic allocation
    ///
    /// \ref named-templ-param "Named parameter" for setting Elevator
    /// type with automatic allocation.
    /// The Elevator should have a standard constructor interface so that
    /// it can be created automatically by the algorithm (i.e. the
    /// digraph and the maximum level should be passed to it).
    /// However, an external elevator object could also be passed to the
    /// algorithm with the \ref elevator(Elevator&) "elevator()" function
    /// before calling \ref run() or \ref init().
    /// \sa SetElevator
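    ///
    /// For illustration, this named parameter could be combined with
    /// \ref LinkedElevator roughly as follows (a sketch only; the type
    /// alias name is illustrative):
    /// \code
    ///   typedef Circulation<ListDigraph>
    ///     ::SetStandardElevator<LinkedElevator<ListDigraph, ListDigraph::Node> >
    ///     ::Create MyCirculation;
    ///   MyCirculation circ(g, lo, up, delta);
    ///   circ.run();
    /// \endcode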
    template <typename T>
    struct SetStandardElevator
      : public Circulation<Digraph, LCapMap, UCapMap, DeltaMap,
                           SetStandardElevatorTraits<T> > {
      typedef Circulation<Digraph, LCapMap, UCapMap, DeltaMap,
                          SetStandardElevatorTraits<T> > Create;
    };

    /// @}

  protected:

    Circulation() {}

  public:

    /// The constructor of the class.

    /// The constructor of the class.
    /// \param g The digraph the algorithm runs on.
    /// \param lo The lower bound capacity of the arcs.
    /// \param up The upper bound capacity of the arcs.
    /// \param delta The lower bound for the supply of the nodes.
    Circulation(const Digraph &g,const LCapMap &lo,
                const UCapMap &up,const DeltaMap &delta)
      : _g(g), _node_num(),
        _lo(&lo),_up(&up),_delta(&delta),_flow(0),_local_flow(false),
        _level(0), _local_level(false), _excess(0), _el() {}

    /// Destructor.
    ~Circulation() {
      destroyStructures();
    }


  private:

    void createStructures() {
      _node_num = _el = countNodes(_g);

      if (!_flow) {
        _flow = Traits::createFlowMap(_g);
        _local_flow = true;
      }
      if (!_level) {
        _level = Traits::createElevator(_g, _node_num);
        _local_level = true;
      }
      if (!_excess) {
        _excess = new ExcessMap(_g);
      }
    }

    void destroyStructures() {
      if (_local_flow) {
        delete _flow;
      }
      if (_local_level) {
        delete _level;
      }
      if (_excess) {
        delete _excess;
      }
    }

  public:

    /// Sets the lower bound capacity map.

    /// Sets the lower bound capacity map.
    /// \return <tt>(*this)</tt>
    Circulation& lowerCapMap(const LCapMap& map) {
      _lo = &map;
      return *this;
    }

    /// Sets the upper bound capacity map.

    /// Sets the upper bound capacity map.
    /// \return <tt>(*this)</tt>
    Circulation& upperCapMap(const UCapMap& map) {
      _up = &map;
      return *this;
    }

    /// Sets the lower bound map for the supply of the nodes.

    /// Sets the lower bound map for the supply of the nodes.
    /// \return <tt>(*this)</tt>
    Circulation& deltaMap(const DeltaMap& map) {
      _delta = &map;
      return *this;
    }

    /// \brief Sets the flow map.
    ///
    /// Sets the flow map.
    /// If you don't use this function before calling \ref run() or
    /// \ref init(), an instance will be allocated automatically.
    /// The destructor deallocates this automatically allocated map,
    /// of course.
    /// \return <tt>(*this)</tt>
    Circulation& flowMap(FlowMap& map) {
      if (_local_flow) {
        delete _flow;
        _local_flow = false;
      }
      _flow = &map;
      return *this;
    }

    /// \brief Sets the elevator used by the algorithm.
    ///
    /// Sets the elevator used by the algorithm.
    /// If you don't use this function before calling \ref run() or
    /// \ref init(), an instance will be allocated automatically.
    /// The destructor deallocates this automatically allocated elevator,
    /// of course.
    /// \return <tt>(*this)</tt>
    Circulation& elevator(Elevator& elevator) {
      if (_local_level) {
        delete _level;
        _local_level = false;
      }
      _level = &elevator;
      return *this;
    }

    /// \brief Returns a const reference to the elevator.
    ///
    /// Returns a const reference to the elevator.
    ///
    /// \pre Either \ref run() or \ref init() must be called before
    /// using this function.
    const Elevator& elevator() const {
      return *_level;
    }

    /// \brief Sets the tolerance used by the algorithm.
    ///
    /// Sets the tolerance used by the algorithm.
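    ///
    /// For example, assuming a floating point \ref Value type, an
    /// epsilon based tolerance could be set like this (the epsilon
    /// value is only illustrative):
    /// \code
    ///   circ.tolerance(Tolerance(1e-9));
    /// \endcode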
    Circulation& tolerance(const Tolerance& tolerance) {
      _tol = tolerance;
      return *this;
    }

    /// \brief Returns a const reference to the tolerance.
    ///
    /// Returns a const reference to the tolerance.
    const Tolerance& tolerance() const {
      return _tol;
    }

    /// \name Execution Control
    /// The simplest way to execute the algorithm is to call \ref run().\n
    /// If you need more control over the initial solution or the execution,
    /// first you have to call one of the \ref init() functions, then
    /// the \ref start() function.
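    ///
    /// For example (a sketch only; \c g, the bound maps and the external
    /// \c flow map are assumed to be defined elsewhere):
    /// \code
    ///   Circulation<ListDigraph> circ(g, lo, up, delta);
    ///   circ.flowMap(flow);  // use an externally allocated flow map
    ///   circ.init();         // start from the lower bounds
    ///   bool feasible = circ.start();
    /// \endcode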

    ///@{

    /// Initializes the internal data structures.

    /// Initializes the internal data structures and sets all flow values
    /// to the lower bound.
    void init()
    {
      createStructures();

      for(NodeIt n(_g);n!=INVALID;++n) {
        (*_excess)[n] = (*_delta)[n];
      }

      for (ArcIt e(_g);e!=INVALID;++e) {
        _flow->set(e, (*_lo)[e]);
        (*_excess)[_g.target(e)] += (*_flow)[e];
        (*_excess)[_g.source(e)] -= (*_flow)[e];
      }

      // global relabeling tested, but in general case it provides
      // worse performance for random digraphs
      _level->initStart();
      for(NodeIt n(_g);n!=INVALID;++n)
        _level->initAddItem(n);
      _level->initFinish();
      for(NodeIt n(_g);n!=INVALID;++n)
        if(_tol.positive((*_excess)[n]))
          _level->activate(n);
    }

    /// Initializes the internal data structures using a greedy approach.

    /// Initializes the internal data structures using a greedy approach
    /// to construct the initial solution.
    void greedyInit()
    {
      createStructures();

      for(NodeIt n(_g);n!=INVALID;++n) {
        (*_excess)[n] = (*_delta)[n];
      }

      for (ArcIt e(_g);e!=INVALID;++e) {
        if (!_tol.positive((*_excess)[_g.target(e)] + (*_up)[e])) {
          _flow->set(e, (*_up)[e]);
          (*_excess)[_g.target(e)] += (*_up)[e];
          (*_excess)[_g.source(e)] -= (*_up)[e];
        } else if (_tol.positive((*_excess)[_g.target(e)] + (*_lo)[e])) {
          _flow->set(e, (*_lo)[e]);
          (*_excess)[_g.target(e)] += (*_lo)[e];
          (*_excess)[_g.source(e)] -= (*_lo)[e];
        } else {
          Value fc = -(*_excess)[_g.target(e)];
          _flow->set(e, fc);
          (*_excess)[_g.target(e)] = 0;
          (*_excess)[_g.source(e)] -= fc;
        }
      }

      _level->initStart();
      for(NodeIt n(_g);n!=INVALID;++n)
        _level->initAddItem(n);
      _level->initFinish();
      for(NodeIt n(_g);n!=INVALID;++n)
        if(_tol.positive((*_excess)[n]))
          _level->activate(n);
    }

    ///Executes the algorithm

    ///This function executes the algorithm.
    ///
    ///\return \c true if a feasible circulation is found.
    ///
    ///\sa barrier()
    ///\sa barrierMap()
    bool start()
    {

      Node act;
      Node bact=INVALID;
      Node last_activated=INVALID;
      while((act=_level->highestActive())!=INVALID) {
        int actlevel=(*_level)[act];
        int mlevel=_node_num;
        Value exc=(*_excess)[act];

        for(OutArcIt e(_g,act);e!=INVALID; ++e) {
          Node v = _g.target(e);
          Value fc=(*_up)[e]-(*_flow)[e];
          if(!_tol.positive(fc)) continue;
          if((*_level)[v]<actlevel) {
            if(!_tol.less(fc, exc)) {
              _flow->set(e, (*_flow)[e] + exc);
              (*_excess)[v] += exc;
              if(!_level->active(v) && _tol.positive((*_excess)[v]))
                _level->activate(v);
              (*_excess)[act] = 0;
              _level->deactivate(act);
              goto next_l;
            }
            else {
              _flow->set(e, (*_up)[e]);
              (*_excess)[v] += fc;
              if(!_level->active(v) && _tol.positive((*_excess)[v]))
                _level->activate(v);
              exc-=fc;
            }
          }
          else if((*_level)[v]<mlevel) mlevel=(*_level)[v];
        }
        for(InArcIt e(_g,act);e!=INVALID; ++e) {
          Node v = _g.source(e);
          Value fc=(*_flow)[e]-(*_lo)[e];
          if(!_tol.positive(fc)) continue;
          if((*_level)[v]<actlevel) {
            if(!_tol.less(fc, exc)) {
              _flow->set(e, (*_flow)[e] - exc);
              (*_excess)[v] += exc;
              if(!_level->active(v) && _tol.positive((*_excess)[v]))
                _level->activate(v);
              (*_excess)[act] = 0;
              _level->deactivate(act);
              goto next_l;
            }
            else {
              _flow->set(e, (*_lo)[e]);
              (*_excess)[v] += fc;
              if(!_level->active(v) && _tol.positive((*_excess)[v]))
                _level->activate(v);
              exc-=fc;
            }
          }
          else if((*_level)[v]<mlevel) mlevel=(*_level)[v];
        }

        (*_excess)[act] = exc;
        if(!_tol.positive(exc)) _level->deactivate(act);
        else if(mlevel==_node_num) {
          _level->liftHighestActiveToTop();
          _el = _node_num;
          return false;
        }
        else {
          _level->liftHighestActive(mlevel+1);
          if(_level->onLevel(actlevel)==0) {
            _el = actlevel;
            return false;
          }
        }
      next_l:
        ;
      }
      return true;
    }

    /// Runs the algorithm.

    /// This function runs the algorithm.
    ///
    /// \return \c true if a feasible circulation is found.
    ///
    /// \note Apart from the return value, c.run() is just a shortcut of
    /// the following code.
    /// \code
    ///   c.greedyInit();
    ///   c.start();
    /// \endcode
    bool run() {
      greedyInit();
      return start();
    }

    /// @}

    /// \name Query Functions
    /// The results of the circulation algorithm can be obtained using
    /// these functions.\n
    /// Either \ref run() or \ref start() should be called before
    /// using them.

    ///@{

    /// \brief Returns the flow on the given arc.
    ///
    /// Returns the flow on the given arc.
    ///
    /// \pre Either \ref run() or \ref init() must be called before
    /// using this function.
    Value flow(const Arc& arc) const {
      return (*_flow)[arc];
    }

    /// \brief Returns a const reference to the flow map.
    ///
    /// Returns a const reference to the arc map storing the found flow.
    ///
    /// \pre Either \ref run() or \ref init() must be called before
    /// using this function.
    const FlowMap& flowMap() const {
      return *_flow;
    }

    /**
       \brief Returns \c true if the given node is in a barrier.

       A barrier is a set \e B of nodes for which

       \f[ \sum_{a\in\delta_{out}(B)} upper(a) -
           \sum_{a\in\delta_{in}(B)} lower(a) < \sum_{v\in B}delta(v) \f]

       holds. The existence of a set with this property proves that a
       feasible circulation cannot exist.

       This function returns \c true if the given node is in the found
       barrier. If a feasible circulation is found, the function
       gives back \c false for every node.

       \pre Either \ref run() or \ref init() must be called before
       using this function.

       \sa barrierMap()
       \sa checkBarrier()
    */
    bool barrier(const Node& node) const
    {
      return (*_level)[node] >= _el;
    }

    /// \brief Gives back a barrier.
    ///
    /// This function sets \c bar to the characteristic vector of the
    /// found barrier. \c bar should be a \ref concepts::WriteMap "writable"
    /// node map with \c bool (or convertible) value type.
    ///
    /// If a feasible circulation is found, the function gives back an
    /// empty set, so \c bar[v] will be \c false for all nodes \c v.
    ///
    /// \note This function calls \ref barrier() for each node,
    /// so it runs in O(n) time.
    ///
    /// \pre Either \ref run() or \ref init() must be called before
    /// using this function.
    ///
    /// \sa barrier()
    /// \sa checkBarrier()
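    ///
    /// For illustration, the barrier of an infeasible instance could be
    /// extracted like this (the map name is illustrative only):
    /// \code
    ///   if (!circ.run()) {
    ///     ListDigraph::NodeMap<bool> bar(g);
    ///     circ.barrierMap(bar);
    ///     // bar[v] is true exactly for the nodes of the found barrier
    ///   }
    /// \endcode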
    template<class BarrierMap>
    void barrierMap(BarrierMap &bar) const
    {
      for(NodeIt n(_g);n!=INVALID;++n)
        bar.set(n, (*_level)[n] >= _el);
    }

    /// @}

    /// \name Checker Functions
    /// The feasibility of the results can be checked using
    /// these functions.\n
    /// Either \ref run() or \ref start() should be called before
    /// using them.

    ///@{

    ///Check if the found flow is a feasible circulation

    ///Check if the found flow is a feasible circulation.
    ///
    bool checkFlow() const {
      for(ArcIt e(_g);e!=INVALID;++e)
        if((*_flow)[e]<(*_lo)[e]||(*_flow)[e]>(*_up)[e]) return false;
      for(NodeIt n(_g);n!=INVALID;++n)
        {
          Value dif=-(*_delta)[n];
          for(InArcIt e(_g,n);e!=INVALID;++e) dif-=(*_flow)[e];
          for(OutArcIt e(_g,n);e!=INVALID;++e) dif+=(*_flow)[e];
          if(_tol.negative(dif)) return false;
        }
      return true;
    }

    ///Check whether or not the last execution provides a barrier

    ///Check whether or not the last execution provides a barrier.
    ///\sa barrier()
    ///\sa barrierMap()
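    ///
    ///For example, the reported barrier can be verified after an
    ///infeasible run:
    ///\code
    ///  if (!circ.run()) {
    ///    LEMON_ASSERT(circ.checkBarrier(), "Wrong barrier");
    ///  }
    ///\endcode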
    bool checkBarrier() const
    {
      Value delta=0;
      for(NodeIt n(_g);n!=INVALID;++n)
        if(barrier(n))
          delta-=(*_delta)[n];
      for(ArcIt e(_g);e!=INVALID;++e)
        {
          Node s=_g.source(e);
          Node t=_g.target(e);
          if(barrier(s)&&!barrier(t)) delta+=(*_up)[e];
          else if(barrier(t)&&!barrier(s)) delta-=(*_lo)[e];
        }
      return _tol.negative(delta);
    }

    /// @}

  };

}

#endif
@@ -550,1297 +550,1297 @@ |
    /// \brief Make a copy of the given node.
    ///
    /// This function makes a copy of the given node.
    DigraphCopy& node(const Node& node, TNode& tnode) {
      _node_maps.push_back(new _core_bits::ItemCopy<From, Node,
                           NodeRefMap, TNode>(node, tnode));
      return *this;
    }

    /// \brief Copy the arc references into the given map.
    ///
    /// This function copies the arc references into the given map.
    /// The parameter should be a map, whose key type is the Arc type of
    /// the source digraph, while the value type is the Arc type of the
    /// destination digraph.
    template <typename ArcRef>
    DigraphCopy& arcRef(ArcRef& map) {
      _arc_maps.push_back(new _core_bits::RefCopy<From, Arc,
                          ArcRefMap, ArcRef>(map));
      return *this;
    }

    /// \brief Copy the arc cross references into the given map.
    ///
    /// This function copies the arc cross references (reverse references)
    /// into the given map. The parameter should be a map, whose key type
    /// is the Arc type of the destination digraph, while the value type is
    /// the Arc type of the source digraph.
    template <typename ArcCrossRef>
    DigraphCopy& arcCrossRef(ArcCrossRef& map) {
      _arc_maps.push_back(new _core_bits::CrossRefCopy<From, Arc,
                          ArcRefMap, ArcCrossRef>(map));
      return *this;
    }

    /// \brief Make a copy of the given arc map.
    ///
    /// This function makes a copy of the given arc map for the newly
    /// created digraph.
    /// The key type of the new map \c tmap should be the Arc type of the
    /// destination digraph, and the key type of the original map \c map
    /// should be the Arc type of the source digraph.
    template <typename FromMap, typename ToMap>
    DigraphCopy& arcMap(const FromMap& map, ToMap& tmap) {
      _arc_maps.push_back(new _core_bits::MapCopy<From, Arc,
                          ArcRefMap, FromMap, ToMap>(map, tmap));
      return *this;
    }

    /// \brief Make a copy of the given arc.
    ///
    /// This function makes a copy of the given arc.
    DigraphCopy& arc(const Arc& arc, TArc& tarc) {
      _arc_maps.push_back(new _core_bits::ItemCopy<From, Arc,
                          ArcRefMap, TArc>(arc, tarc));
      return *this;
    }

    /// \brief Execute copying.
    ///
    /// This function executes the copying of the digraph along with the
    /// copying of the assigned data.
    void run() {
      NodeRefMap nodeRefMap(_from);
      ArcRefMap arcRefMap(_from);
      _core_bits::DigraphCopySelector<To>::
        copy(_from, _to, nodeRefMap, arcRefMap);
      for (int i = 0; i < int(_node_maps.size()); ++i) {
        _node_maps[i]->copy(_from, nodeRefMap);
      }
      for (int i = 0; i < int(_arc_maps.size()); ++i) {
        _arc_maps[i]->copy(_from, arcRefMap);
      }
    }

  protected:

    const From& _from;
    To& _to;

    std::vector<_core_bits::MapCopyBase<From, Node, NodeRefMap>* >
      _node_maps;

    std::vector<_core_bits::MapCopyBase<From, Arc, ArcRefMap>* >
      _arc_maps;

  };

  /// \brief Copy a digraph to another digraph.
  ///
  /// This function copies a digraph to another digraph.
  /// The complete usage of it is detailed in the DigraphCopy class, but
  /// a short example shows the basics:
  ///\code
  ///   digraphCopy(src, trg).nodeRef(nr).arcCrossRef(acr).run();
  ///\endcode
  ///
  /// After the copy the \c nr map will contain the mapping from the
  /// nodes of the \c from digraph to the nodes of the \c to digraph and
  /// \c acr will contain the mapping from the arcs of the \c to digraph
  /// to the arcs of the \c from digraph.
  ///
  /// \see DigraphCopy
  template <typename From, typename To>
  DigraphCopy<From, To> digraphCopy(const From& from, To& to) {
    return DigraphCopy<From, To>(from, to);
  }

  /// \brief Class to copy a graph.
  ///
  /// Class to copy a graph to another graph (duplicate a graph). The
  /// simplest way of using it is through the \c graphCopy() function.
  ///
  /// This class not only makes a copy of a graph, but it can also create
  /// references and cross references between the nodes, edges and arcs of
  /// the two graphs, and it can copy maps for use with the newly created
  /// graph.
  ///
  /// To make a copy from a graph, first an instance of GraphCopy
  /// should be created, then the data that belongs to the graph should
  /// be assigned to be copied. In the end, the \c run() member should be
  /// called.
  ///
  /// The following code copies a graph along with several pieces of data:
  ///\code
  ///   GraphCopy<OrigGraph, NewGraph> cg(orig_graph, new_graph);
  ///   // Create references for the nodes
  ///   OrigGraph::NodeMap<NewGraph::Node> nr(orig_graph);
  ///   cg.nodeRef(nr);
  ///   // Create cross references (inverse) for the edges
  ///   NewGraph::EdgeMap<OrigGraph::Edge> ecr(new_graph);
  ///   cg.edgeCrossRef(ecr);
  ///   // Copy an edge map
  ///   OrigGraph::EdgeMap<double> oemap(orig_graph);
  ///   NewGraph::EdgeMap<double> nemap(new_graph);
  ///   cg.edgeMap(oemap, nemap);
  ///   // Copy a node
  ///   OrigGraph::Node on;
  ///   NewGraph::Node nn;
  ///   cg.node(on, nn);
  ///   // Execute copying
  ///   cg.run();
  ///\endcode
  template <typename From, typename To>
  class GraphCopy {
  private:

    typedef typename From::Node Node;
    typedef typename From::NodeIt NodeIt;
    typedef typename From::Arc Arc;
    typedef typename From::ArcIt ArcIt;
    typedef typename From::Edge Edge;
    typedef typename From::EdgeIt EdgeIt;

    typedef typename To::Node TNode;
    typedef typename To::Arc TArc;
    typedef typename To::Edge TEdge;

    typedef typename From::template NodeMap<TNode> NodeRefMap;
    typedef typename From::template EdgeMap<TEdge> EdgeRefMap;

    struct ArcRefMap {
      ArcRefMap(const From& from, const To& to,
                const EdgeRefMap& edge_ref, const NodeRefMap& node_ref)
        : _from(from), _to(to),
          _edge_ref(edge_ref), _node_ref(node_ref) {}

      typedef typename From::Arc Key;
      typedef typename To::Arc Value;

      Value operator[](const Key& key) const {
        bool forward = _from.u(key) != _from.v(key) ?
          _node_ref[_from.source(key)] ==
          _to.source(_to.direct(_edge_ref[key], true)) :
          _from.direction(key);
        return _to.direct(_edge_ref[key], forward);
      }

      const From& _from;
      const To& _to;
      const EdgeRefMap& _edge_ref;
      const NodeRefMap& _node_ref;
    };

  public:

    /// \brief Constructor of GraphCopy.
    ///
    /// Constructor of GraphCopy for copying the content of the
    /// \c from graph into the \c to graph.
    GraphCopy(const From& from, To& to)
      : _from(from), _to(to) {}

    /// \brief Destructor of GraphCopy
    ///
    /// Destructor of GraphCopy.
    ~GraphCopy() {
      for (int i = 0; i < int(_node_maps.size()); ++i) {
        delete _node_maps[i];
      }
      for (int i = 0; i < int(_arc_maps.size()); ++i) {
        delete _arc_maps[i];
      }
      for (int i = 0; i < int(_edge_maps.size()); ++i) {
        delete _edge_maps[i];
      }
    }

    /// \brief Copy the node references into the given map.
    ///
    /// This function copies the node references into the given map.
    /// The parameter should be a map, whose key type is the Node type of
    /// the source graph, while the value type is the Node type of the
    /// destination graph.
    template <typename NodeRef>
    GraphCopy& nodeRef(NodeRef& map) {
      _node_maps.push_back(new _core_bits::RefCopy<From, Node,
                           NodeRefMap, NodeRef>(map));
      return *this;
    }

    /// \brief Copy the node cross references into the given map.
    ///
    /// This function copies the node cross references (reverse references)
    /// into the given map. The parameter should be a map, whose key type
    /// is the Node type of the destination graph, while the value type is
    /// the Node type of the source graph.
    template <typename NodeCrossRef>
    GraphCopy& nodeCrossRef(NodeCrossRef& map) {
      _node_maps.push_back(new _core_bits::CrossRefCopy<From, Node,
                           NodeRefMap, NodeCrossRef>(map));
      return *this;
    }

    /// \brief Make a copy of the given node map.
    ///
    /// This function makes a copy of the given node map for the newly
    /// created graph.
    /// The key type of the new map \c tmap should be the Node type of the
    /// destination graph, and the key type of the original map \c map
    /// should be the Node type of the source graph.
    template <typename FromMap, typename ToMap>
    GraphCopy& nodeMap(const FromMap& map, ToMap& tmap) {
      _node_maps.push_back(new _core_bits::MapCopy<From, Node,
                           NodeRefMap, FromMap, ToMap>(map, tmap));
      return *this;
    }

    /// \brief Make a copy of the given node.
    ///
    /// This function makes a copy of the given node.
    GraphCopy& node(const Node& node, TNode& tnode) {
      _node_maps.push_back(new _core_bits::ItemCopy<From, Node,
                           NodeRefMap, TNode>(node, tnode));
      return *this;
    }

    /// \brief Copy the arc references into the given map.
    ///
    /// This function copies the arc references into the given map.
    /// The parameter should be a map, whose key type is the Arc type of
    /// the source graph, while the value type is the Arc type of the
    /// destination graph.
    template <typename ArcRef>
    GraphCopy& arcRef(ArcRef& map) {
      _arc_maps.push_back(new _core_bits::RefCopy<From, Arc,
                          ArcRefMap, ArcRef>(map));
      return *this;
    }

    /// \brief Copy the arc cross references into the given map.
    ///
    /// This function copies the arc cross references (reverse references)
    /// into the given map. The parameter should be a map, whose key type
    /// is the Arc type of the destination graph, while the value type is
    /// the Arc type of the source graph.
    template <typename ArcCrossRef>
    GraphCopy& arcCrossRef(ArcCrossRef& map) {
      _arc_maps.push_back(new _core_bits::CrossRefCopy<From, Arc,
                          ArcRefMap, ArcCrossRef>(map));
      return *this;
    }

    /// \brief Make a copy of the given arc map.
    ///
    /// This function makes a copy of the given arc map for the newly
    /// created graph.
    /// The key type of the new map \c tmap should be the Arc type of the
    /// destination graph, and the key type of the original map \c map
    /// should be the Arc type of the source graph.
    template <typename FromMap, typename ToMap>
    GraphCopy& arcMap(const FromMap& map, ToMap& tmap) {
      _arc_maps.push_back(new _core_bits::MapCopy<From, Arc,
                          ArcRefMap, FromMap, ToMap>(map, tmap));
      return *this;
    }

    /// \brief Make a copy of the given arc.
    ///
    /// This function makes a copy of the given arc.
    GraphCopy& arc(const Arc& arc, TArc& tarc) {
      _arc_maps.push_back(new _core_bits::ItemCopy<From, Arc,
                          ArcRefMap, TArc>(arc, tarc));
      return *this;
    }

    /// \brief Copy the edge references into the given map.
    ///
    /// This function copies the edge references into the given map.
    /// The parameter should be a map, whose key type is the Edge type of
    /// the source graph, while the value type is the Edge type of the
    /// destination graph.
    template <typename EdgeRef>
    GraphCopy& edgeRef(EdgeRef& map) {
      _edge_maps.push_back(new _core_bits::RefCopy<From, Edge,
                           EdgeRefMap, EdgeRef>(map));
      return *this;
    }

    /// \brief Copy the edge cross references into the given map.
    ///
    /// This function copies the edge cross references (reverse references)
    /// into the given map. The parameter should be a map, whose key type
    /// is the Edge type of the destination graph, while the value type is
    /// the Edge type of the source graph.
    template <typename EdgeCrossRef>
    GraphCopy& edgeCrossRef(EdgeCrossRef& map) {
      _edge_maps.push_back(new _core_bits::CrossRefCopy<From,
                           Edge, EdgeRefMap, EdgeCrossRef>(map));
      return *this;
    }

    /// \brief Make a copy of the given edge map.
    ///
    /// This function makes a copy of the given edge map for the newly
    /// created graph.
    /// The key type of the new map \c tmap should be the Edge type of the
    /// destination graph, and the key type of the original map \c map
    /// should be the Edge type of the source graph.
    template <typename FromMap, typename ToMap>
    GraphCopy& edgeMap(const FromMap& map, ToMap& tmap) {
      _edge_maps.push_back(new _core_bits::MapCopy<From, Edge,
                           EdgeRefMap, FromMap, ToMap>(map, tmap));
      return *this;
    }

    /// \brief Make a copy of the given edge.
    ///
    /// This function makes a copy of the given edge.
    GraphCopy& edge(const Edge& edge, TEdge& tedge) {
      _edge_maps.push_back(new _core_bits::ItemCopy<From, Edge,
                           EdgeRefMap, TEdge>(edge, tedge));
      return *this;
    }

    /// \brief Execute copying.
    ///
    /// This function executes the copying of the graph along with the
    /// copying of the assigned data.
    void run() {
      NodeRefMap nodeRefMap(_from);
      EdgeRefMap edgeRefMap(_from);
      ArcRefMap arcRefMap(_from, _to, edgeRefMap, nodeRefMap);
      _core_bits::GraphCopySelector<To>::
        copy(_from, _to, nodeRefMap, edgeRefMap);
      for (int i = 0; i < int(_node_maps.size()); ++i) {
        _node_maps[i]->copy(_from, nodeRefMap);
      }
      for (int i = 0; i < int(_edge_maps.size()); ++i) {
        _edge_maps[i]->copy(_from, edgeRefMap);
      }
      for (int i = 0; i < int(_arc_maps.size()); ++i) {
        _arc_maps[i]->copy(_from, arcRefMap);
      }
    }

  private:

    const From& _from;
    To& _to;

    std::vector<_core_bits::MapCopyBase<From, Node, NodeRefMap>* >
      _node_maps;

    std::vector<_core_bits::MapCopyBase<From, Arc, ArcRefMap>* >
      _arc_maps;

    std::vector<_core_bits::MapCopyBase<From, Edge, EdgeRefMap>* >
      _edge_maps;

  };

  /// \brief Copy a graph to another graph.
  ///
  /// This function copies a graph to another graph.
  /// The complete usage of it is detailed in the GraphCopy class,
  /// but a short example shows the basics:
  ///\code
  ///   graphCopy(src, trg).nodeRef(nr).edgeCrossRef(ecr).run();
  ///\endcode
  ///
  /// After the copy the \c nr map will contain the mapping from the
  /// nodes of the \c from graph to the nodes of the \c to graph and
  /// \c ecr will contain the mapping from the edges of the \c to graph
  /// to the edges of the \c from graph.
  ///
  /// \see GraphCopy
  template <typename From, typename To>
  GraphCopy<From, To>
  graphCopy(const From& from, To& to) {
    return GraphCopy<From, To>(from, to);
  }

  namespace _core_bits {

    template <typename Graph, typename Enable = void>
    struct FindArcSelector {
      typedef typename Graph::Node Node;
      typedef typename Graph::Arc Arc;
      static Arc find(const Graph &g, Node u, Node v, Arc e) {
        if (e == INVALID) {
          g.firstOut(e, u);
        } else {
          g.nextOut(e);
        }
        while (e != INVALID && g.target(e) != v) {
          g.nextOut(e);
        }
        return e;
      }
    };

    template <typename Graph>
    struct FindArcSelector<
      Graph,
      typename enable_if<typename Graph::FindArcTag, void>::type>
    {
      typedef typename Graph::Node Node;
      typedef typename Graph::Arc Arc;
      static Arc find(const Graph &g, Node u, Node v, Arc prev) {
        return g.findArc(u, v, prev);
      }
    };
  }

  /// \brief Find an arc between two nodes of a digraph.
  ///
  /// This function finds an arc from node \c u to node \c v in the
  /// digraph \c g.
  ///
  /// If \c prev is \ref INVALID (this is the default value), then
  /// it finds the first arc from \c u to \c v. Otherwise it looks for
  /// the next arc from \c u to \c v after \c prev.
  /// \return The found arc or \ref INVALID if there is no such arc.
  ///
  /// Thus you can iterate through each arc from \c u to \c v as follows.
  ///\code
  /// for(Arc e = findArc(g,u,v); e != INVALID; e = findArc(g,u,v,e)) {
  ///   ...
  /// }
  ///\endcode
  ///
  /// \note \ref ConArcIt provides an iterator interface for the same
  /// functionality.
  ///
  ///\sa ConArcIt
  ///\sa ArcLookUp, AllArcLookUp, DynArcLookUp
  template <typename Graph>
  inline typename Graph::Arc
  findArc(const Graph &g, typename Graph::Node u, typename Graph::Node v,
          typename Graph::Arc prev = INVALID) {
    return _core_bits::FindArcSelector<Graph>::find(g, u, v, prev);
  }

  /// \brief Iterator for iterating on parallel arcs connecting the same nodes.
  ///
  /// Iterator for iterating on parallel arcs connecting the same nodes. It is
  /// a higher level interface for the \ref findArc() function. You can
  /// use it in the following way:
  ///\code
  /// for (ConArcIt<Graph> it(g, src, trg); it != INVALID; ++it) {
  ///   ...
  /// }
  ///\endcode
  ///
  ///\sa findArc()
  ///\sa ArcLookUp, AllArcLookUp, DynArcLookUp
  template <typename GR>
  class ConArcIt : public GR::Arc {
  public:

    typedef GR Graph;
    typedef typename Graph::Arc Parent;

    typedef typename Graph::Arc Arc;
    typedef typename Graph::Node Node;

    /// \brief Constructor.
    ///
    /// Construct a new ConArcIt iterating on the arcs that
    /// connect nodes \c u and \c v.
    ConArcIt(const Graph& g, Node u, Node v) : _graph(g) {
      Parent::operator=(findArc(_graph, u, v));
    }

    /// \brief Constructor.
    ///
    /// Construct a new ConArcIt that continues iterating from arc \c a.
    ConArcIt(const Graph& g, Arc a) : Parent(a), _graph(g) {}

    /// \brief Increment operator.
    ///
    /// It increments the iterator and gives back the next arc.
    ConArcIt& operator++() {
      Parent::operator=(findArc(_graph, _graph.source(*this),
                                _graph.target(*this), *this));
      return *this;
    }
  private:
    const Graph& _graph;
  };

  namespace _core_bits {

    template <typename Graph, typename Enable = void>
    struct FindEdgeSelector {
      typedef typename Graph::Node Node;
      typedef typename Graph::Edge Edge;
      static Edge find(const Graph &g, Node u, Node v, Edge e) {
        bool b;
        if (u != v) {
          if (e == INVALID) {
            g.firstInc(e, b, u);
          } else {
            b = g.u(e) == u;
            g.nextInc(e, b);
          }
          while (e != INVALID && (b ? g.v(e) : g.u(e)) != v) {
            g.nextInc(e, b);
          }
        } else {
          if (e == INVALID) {
            g.firstInc(e, b, u);
          } else {
            b = true;
            g.nextInc(e, b);
          }
          while (e != INVALID && (!b || g.v(e) != v)) {
            g.nextInc(e, b);
          }
        }
        return e;
      }
    };

    template <typename Graph>
    struct FindEdgeSelector<
      Graph,
      typename enable_if<typename Graph::FindEdgeTag, void>::type>
    {
      typedef typename Graph::Node Node;
      typedef typename Graph::Edge Edge;
      static Edge find(const Graph &g, Node u, Node v, Edge prev) {
        return g.findEdge(u, v, prev);
      }
    };
  }

  /// \brief Find an edge between two nodes of a graph.
  ///
  /// This function finds an edge from node \c u to node \c v in graph \c g.
  /// If node \c u and node \c v are equal then each loop edge
  /// will be enumerated once.
  ///
  /// If \c prev is \ref INVALID (this is the default value), then
  /// it finds the first edge from \c u to \c v. Otherwise it looks for
  /// the next edge from \c u to \c v after \c prev.
  /// \return The found edge or \ref INVALID if there is no such edge.
  ///
  /// Thus you can iterate through each edge between \c u and \c v
  /// as follows.
  ///\code
  /// for(Edge e = findEdge(g,u,v); e != INVALID; e = findEdge(g,u,v,e)) {
  ///   ...
  /// }
  ///\endcode
  ///
  /// \note \ref ConEdgeIt provides an iterator interface for the same
  /// functionality.
  ///
  ///\sa ConEdgeIt
  template <typename Graph>
  inline typename Graph::Edge
  findEdge(const Graph &g, typename Graph::Node u, typename Graph::Node v,
           typename Graph::Edge p = INVALID) {
    return _core_bits::FindEdgeSelector<Graph>::find(g, u, v, p);
  }
1147 | 1147 |
|
1148 | 1148 |
/// \brief Iterator for iterating on parallel edges connecting the same nodes. |
1149 | 1149 |
/// |
1150 | 1150 |
/// Iterator for iterating on parallel edges connecting the same nodes. |
1151 | 1151 |
/// It is a higher level interface for the findEdge() function. You can |
1152 | 1152 |
/// use it the following way: |
1153 | 1153 |
///\code |
1154 | 1154 |
/// for (ConEdgeIt<Graph> it(g, u, v); it != INVALID; ++it) { |
1155 | 1155 |
/// ... |
1156 | 1156 |
/// } |
1157 | 1157 |
///\endcode |
1158 | 1158 |
/// |
1159 | 1159 |
///\sa findEdge() |
1160 | 1160 |
template <typename GR> |
1161 | 1161 |
class ConEdgeIt : public GR::Edge { |
1162 | 1162 |
public: |
1163 | 1163 |
|
1164 | 1164 |
typedef GR Graph; |
1165 | 1165 |
typedef typename Graph::Edge Parent; |
1166 | 1166 |
|
1167 | 1167 |
typedef typename Graph::Edge Edge; |
1168 | 1168 |
typedef typename Graph::Node Node; |
1169 | 1169 |
|
1170 | 1170 |
/// \brief Constructor. |
1171 | 1171 |
/// |
1172 | 1172 |
/// Construct a new ConEdgeIt iterating on the edges that |
1173 | 1173 |
/// connect nodes \c u and \c v.
1174 | 1174 |
ConEdgeIt(const Graph& g, Node u, Node v) : _graph(g), _u(u), _v(v) { |
1175 | 1175 |
Parent::operator=(findEdge(_graph, _u, _v)); |
1176 | 1176 |
} |
1177 | 1177 |
|
1178 | 1178 |
/// \brief Constructor. |
1179 | 1179 |
/// |
1180 | 1180 |
/// Construct a new ConEdgeIt that continues iterating from edge \c e. |
1181 | 1181 |
ConEdgeIt(const Graph& g, Edge e) : Parent(e), _graph(g) {} |
1182 | 1182 |
|
1183 | 1183 |
/// \brief Increment operator. |
1184 | 1184 |
/// |
1185 | 1185 |
/// It increments the iterator and gives back the next edge. |
1186 | 1186 |
ConEdgeIt& operator++() { |
1187 | 1187 |
Parent::operator=(findEdge(_graph, _u, _v, *this)); |
1188 | 1188 |
return *this; |
1189 | 1189 |
} |
1190 | 1190 |
private: |
1191 | 1191 |
const Graph& _graph; |
1192 | 1192 |
Node _u, _v; |
1193 | 1193 |
}; |
1194 | 1194 |
|
1195 | 1195 |
|
1196 | 1196 |
///Dynamic arc look-up between given endpoints. |
1197 | 1197 |
|
1198 | 1198 |
///Using this class, you can find an arc in a digraph from a given |
1199 | 1199 |
///source to a given target in amortized time <em>O</em>(log<em>d</em>), |
1200 | 1200 |
///where <em>d</em> is the out-degree of the source node. |
1201 | 1201 |
/// |
1202 | 1202 |
///It is possible to find \e all parallel arcs between two nodes with |
1203 | 1203 |
///the \c operator() member. |
1204 | 1204 |
/// |
1205 | 1205 |
///This is a dynamic data structure. Consider using \ref ArcLookUp or
1206 | 1206 |
///\ref AllArcLookUp if your digraph does not change frequently.
1207 | 1207 |
/// |
1208 | 1208 |
///This class uses a self-adjusting binary search tree, the Splay tree |
1209 | 1209 |
///of Sleator and Tarjan, to guarantee the logarithmic amortized
1210 | 1210 |
///time bound for arc look-ups. This class also guarantees the |
1211 | 1211 |
///optimal time bound up to a constant factor for any distribution of
1212 | 1212 |
///queries. |
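///
///A minimal usage sketch (assuming a \c ListDigraph \c g and two of its
///nodes \c u and \c v; the names are illustrative only). Since the
///structure observes the digraph, no manual refresh is needed after
///modifications:
///\code
///  DynArcLookUp<ListDigraph> lookup(g);
///  g.addArc(u, v);          // the look-up structure follows this change
///  Arc a = lookup(u, v);    // an arc from u to v, or INVALID
///\endcode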
1213 | 1213 |
/// |
1214 | 1214 |
///\tparam GR The type of the underlying digraph. |
1215 | 1215 |
/// |
1216 | 1216 |
///\sa ArcLookUp |
1217 | 1217 |
///\sa AllArcLookUp |
1218 | 1218 |
template <typename GR> |
1219 | 1219 |
class DynArcLookUp |
1220 | 1220 |
: protected ItemSetTraits<GR, typename GR::Arc>::ItemNotifier::ObserverBase |
1221 | 1221 |
{ |
1222 | 1222 |
public: |
1223 | 1223 |
typedef typename ItemSetTraits<GR, typename GR::Arc> |
1224 | 1224 |
::ItemNotifier::ObserverBase Parent; |
1225 | 1225 |
|
1226 | 1226 |
TEMPLATE_DIGRAPH_TYPEDEFS(GR); |
1227 | 1227 |
typedef GR Digraph; |
1228 | 1228 |
|
1229 | 1229 |
protected: |
1230 | 1230 |
|
1231 | 1231 |
class AutoNodeMap : public ItemSetTraits<GR, Node>::template Map<Arc>::Type { |
1232 | 1232 |
public: |
1233 | 1233 |
|
1234 | 1234 |
typedef typename ItemSetTraits<GR, Node>::template Map<Arc>::Type Parent; |
1235 | 1235 |
|
1236 | 1236 |
AutoNodeMap(const GR& digraph) : Parent(digraph, INVALID) {} |
1237 | 1237 |
|
1238 | 1238 |
virtual void add(const Node& node) { |
1239 | 1239 |
Parent::add(node); |
1240 | 1240 |
Parent::set(node, INVALID); |
1241 | 1241 |
} |
1242 | 1242 |
|
1243 | 1243 |
virtual void add(const std::vector<Node>& nodes) { |
1244 | 1244 |
Parent::add(nodes); |
1245 | 1245 |
for (int i = 0; i < int(nodes.size()); ++i) { |
1246 | 1246 |
Parent::set(nodes[i], INVALID); |
1247 | 1247 |
} |
1248 | 1248 |
} |
1249 | 1249 |
|
1250 | 1250 |
virtual void build() { |
1251 | 1251 |
Parent::build(); |
1252 | 1252 |
Node it; |
1253 | 1253 |
typename Parent::Notifier* nf = Parent::notifier(); |
1254 | 1254 |
for (nf->first(it); it != INVALID; nf->next(it)) { |
1255 | 1255 |
Parent::set(it, INVALID); |
1256 | 1256 |
} |
1257 | 1257 |
} |
1258 | 1258 |
}; |
1259 | 1259 |
|
1260 | 1260 |
const Digraph &_g; |
1261 | 1261 |
AutoNodeMap _head; |
1262 | 1262 |
typename Digraph::template ArcMap<Arc> _parent; |
1263 | 1263 |
typename Digraph::template ArcMap<Arc> _left; |
1264 | 1264 |
typename Digraph::template ArcMap<Arc> _right; |
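// The out-arcs of every node are stored in a splay tree ordered by
// target node: _head[n] is the root of the tree belonging to node n,
// while _parent, _left and _right hold the tree links of each arc.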
1265 | 1265 |
|
1266 | 1266 |
class ArcLess { |
1267 | 1267 |
const Digraph &g; |
1268 | 1268 |
public: |
1269 | 1269 |
ArcLess(const Digraph &_g) : g(_g) {} |
1270 | 1270 |
bool operator()(Arc a,Arc b) const |
1271 | 1271 |
{ |
1272 | 1272 |
return g.target(a)<g.target(b); |
1273 | 1273 |
} |
1274 | 1274 |
}; |
1275 | 1275 |
|
1276 | 1276 |
public: |
1277 | 1277 |
|
1278 | 1278 |
///Constructor |
1279 | 1279 |
|
1280 | 1280 |
///Constructor. |
1281 | 1281 |
/// |
1282 | 1282 |
///It builds up the search database. |
1283 | 1283 |
DynArcLookUp(const Digraph &g) |
1284 | 1284 |
: _g(g),_head(g),_parent(g),_left(g),_right(g) |
1285 | 1285 |
{ |
1286 | 1286 |
Parent::attach(_g.notifier(typename Digraph::Arc())); |
1287 | 1287 |
refresh(); |
1288 | 1288 |
} |
1289 | 1289 |
|
1290 | 1290 |
protected: |
1291 | 1291 |
|
1292 | 1292 |
virtual void add(const Arc& arc) { |
1293 | 1293 |
insert(arc); |
1294 | 1294 |
} |
1295 | 1295 |
|
1296 | 1296 |
virtual void add(const std::vector<Arc>& arcs) { |
1297 | 1297 |
for (int i = 0; i < int(arcs.size()); ++i) { |
1298 | 1298 |
insert(arcs[i]); |
1299 | 1299 |
} |
1300 | 1300 |
} |
1301 | 1301 |
|
1302 | 1302 |
virtual void erase(const Arc& arc) { |
1303 | 1303 |
remove(arc); |
1304 | 1304 |
} |
1305 | 1305 |
|
1306 | 1306 |
virtual void erase(const std::vector<Arc>& arcs) { |
1307 | 1307 |
for (int i = 0; i < int(arcs.size()); ++i) { |
1308 | 1308 |
remove(arcs[i]); |
1309 | 1309 |
} |
1310 | 1310 |
} |
1311 | 1311 |
|
1312 | 1312 |
virtual void build() { |
1313 | 1313 |
refresh(); |
1314 | 1314 |
} |
1315 | 1315 |
|
1316 | 1316 |
virtual void clear() { |
1317 | 1317 |
for(NodeIt n(_g);n!=INVALID;++n) { |
_head[n] = INVALID;
1319 | 1319 |
} |
1320 | 1320 |
} |
1321 | 1321 |
|
1322 | 1322 |
void insert(Arc arc) { |
1323 | 1323 |
Node s = _g.source(arc); |
1324 | 1324 |
Node t = _g.target(arc); |
_left[arc] = INVALID;
_right[arc] = INVALID;
1327 | 1327 |
|
1328 | 1328 |
Arc e = _head[s]; |
1329 | 1329 |
if (e == INVALID) { |
_head[s] = arc;
_parent[arc] = INVALID;
1332 | 1332 |
return; |
1333 | 1333 |
} |
1334 | 1334 |
while (true) { |
1335 | 1335 |
if (t < _g.target(e)) { |
1336 | 1336 |
if (_left[e] == INVALID) { |
_left[e] = arc;
_parent[arc] = e;
1339 | 1339 |
splay(arc); |
1340 | 1340 |
return; |
1341 | 1341 |
} else { |
1342 | 1342 |
e = _left[e]; |
1343 | 1343 |
} |
1344 | 1344 |
} else { |
1345 | 1345 |
if (_right[e] == INVALID) { |
_right[e] = arc;
_parent[arc] = e;
1348 | 1348 |
splay(arc); |
1349 | 1349 |
return; |
1350 | 1350 |
} else { |
1351 | 1351 |
e = _right[e]; |
1352 | 1352 |
} |
1353 | 1353 |
} |
1354 | 1354 |
} |
1355 | 1355 |
} |
1356 | 1356 |
|
1357 | 1357 |
void remove(Arc arc) { |
1358 | 1358 |
if (_left[arc] == INVALID) { |
1359 | 1359 |
if (_right[arc] != INVALID) { |
_parent[_right[arc]] = _parent[arc];
1361 | 1361 |
} |
1362 | 1362 |
if (_parent[arc] != INVALID) { |
1363 | 1363 |
if (_left[_parent[arc]] == arc) { |
_left[_parent[arc]] = _right[arc];
1365 | 1365 |
} else { |
_right[_parent[arc]] = _right[arc];
1367 | 1367 |
} |
1368 | 1368 |
} else { |
_head[_g.source(arc)] = _right[arc];
1370 | 1370 |
} |
1371 | 1371 |
} else if (_right[arc] == INVALID) { |
_parent[_left[arc]] = _parent[arc];
1373 | 1373 |
if (_parent[arc] != INVALID) { |
1374 | 1374 |
if (_left[_parent[arc]] == arc) { |
_left[_parent[arc]] = _left[arc];
1376 | 1376 |
} else { |
_right[_parent[arc]] = _left[arc];
1378 | 1378 |
} |
1379 | 1379 |
} else { |
_head[_g.source(arc)] = _left[arc];
1381 | 1381 |
} |
1382 | 1382 |
} else { |
1383 | 1383 |
Arc e = _left[arc]; |
1384 | 1384 |
if (_right[e] != INVALID) { |
1385 | 1385 |
e = _right[e]; |
1386 | 1386 |
while (_right[e] != INVALID) { |
1387 | 1387 |
e = _right[e]; |
1388 | 1388 |
} |
1389 | 1389 |
Arc s = _parent[e]; |
_right[_parent[e]] = _left[e];
1391 | 1391 |
if (_left[e] != INVALID) { |
_parent[_left[e]] = _parent[e];
1393 | 1393 |
} |
1394 | 1394 |
|
_left[e] = _left[arc];
_parent[_left[arc]] = e;
_right[e] = _right[arc];
_parent[_right[arc]] = e;
1399 | 1399 |
|
_parent[e] = _parent[arc];
1401 | 1401 |
if (_parent[arc] != INVALID) { |
1402 | 1402 |
if (_left[_parent[arc]] == arc) { |
_left[_parent[arc]] = e;
1404 | 1404 |
} else { |
_right[_parent[arc]] = e;
1406 | 1406 |
} |
1407 | 1407 |
} |
1408 | 1408 |
splay(s); |
1409 | 1409 |
} else { |
_right[e] = _right[arc];
_parent[_right[arc]] = e;
_parent[e] = _parent[arc];
1413 | 1413 |
|
1414 | 1414 |
if (_parent[arc] != INVALID) { |
1415 | 1415 |
if (_left[_parent[arc]] == arc) { |
_left[_parent[arc]] = e;
1417 | 1417 |
} else { |
_right[_parent[arc]] = e;
1419 | 1419 |
} |
1420 | 1420 |
} else { |
_head[_g.source(arc)] = e;
1422 | 1422 |
} |
1423 | 1423 |
} |
1424 | 1424 |
} |
1425 | 1425 |
} |
1426 | 1426 |
|
1427 | 1427 |
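// Builds a balanced search tree over v[a..b] (already sorted by target
// node) and returns its root.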
Arc refreshRec(std::vector<Arc> &v,int a,int b) |
1428 | 1428 |
{ |
1429 | 1429 |
int m=(a+b)/2; |
1430 | 1430 |
Arc me=v[m]; |
1431 | 1431 |
if (a < m) { |
1432 | 1432 |
Arc left = refreshRec(v,a,m-1); |
_left[me] = left;
_parent[left] = me;
1435 | 1435 |
} else { |
_left[me] = INVALID;
1437 | 1437 |
} |
1438 | 1438 |
if (m < b) { |
1439 | 1439 |
Arc right = refreshRec(v,m+1,b); |
_right[me] = right;
_parent[right] = me;
1442 | 1442 |
} else { |
_right[me] = INVALID;
1444 | 1444 |
} |
1445 | 1445 |
return me; |
1446 | 1446 |
} |
1447 | 1447 |
|
1448 | 1448 |
void refresh() { |
1449 | 1449 |
for(NodeIt n(_g);n!=INVALID;++n) { |
1450 | 1450 |
std::vector<Arc> v; |
1451 | 1451 |
for(OutArcIt a(_g,n);a!=INVALID;++a) v.push_back(a); |
1452 | 1452 |
if (!v.empty()) { |
1453 | 1453 |
std::sort(v.begin(),v.end(),ArcLess(_g)); |
1454 | 1454 |
Arc head = refreshRec(v,0,v.size()-1); |
_head[n] = head;
_parent[head] = INVALID;
1457 | 1457 |
} |
else _head[n] = INVALID;
1459 | 1459 |
} |
1460 | 1460 |
} |
1461 | 1461 |
|
1462 | 1462 |
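// zig() rotates v up over its parent in the splay tree (a right
// rotation); zag() is its mirror image (a left rotation). Both are
// helpers of splay().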
void zig(Arc v) { |
1463 | 1463 |
Arc w = _parent[v]; |
_parent[v] = _parent[w];
_parent[w] = v;
_left[w] = _right[v];
_right[v] = w;
1468 | 1468 |
if (_parent[v] != INVALID) { |
1469 | 1469 |
if (_right[_parent[v]] == w) { |
_right[_parent[v]] = v;
1471 | 1471 |
} else { |
_left[_parent[v]] = v;
1473 | 1473 |
} |
1474 | 1474 |
} |
1475 | 1475 |
if (_left[w] != INVALID){ |
_parent[_left[w]] = w;
1477 | 1477 |
} |
1478 | 1478 |
} |
1479 | 1479 |
|
1480 | 1480 |
void zag(Arc v) { |
1481 | 1481 |
Arc w = _parent[v]; |
_parent[v] = _parent[w];
_parent[w] = v;
_right[w] = _left[v];
_left[v] = w;
1486 | 1486 |
if (_parent[v] != INVALID){ |
1487 | 1487 |
if (_left[_parent[v]] == w) { |
_left[_parent[v]] = v;
1489 | 1489 |
} else { |
_right[_parent[v]] = v;
1491 | 1491 |
} |
1492 | 1492 |
} |
1493 | 1493 |
if (_right[w] != INVALID){ |
_parent[_right[w]] = w;
1495 | 1495 |
} |
1496 | 1496 |
} |
1497 | 1497 |
|
1498 | 1498 |
void splay(Arc v) { |
1499 | 1499 |
while (_parent[v] != INVALID) { |
1500 | 1500 |
if (v == _left[_parent[v]]) { |
1501 | 1501 |
if (_parent[_parent[v]] == INVALID) { |
1502 | 1502 |
zig(v); |
1503 | 1503 |
} else { |
1504 | 1504 |
if (_parent[v] == _left[_parent[_parent[v]]]) { |
1505 | 1505 |
zig(_parent[v]); |
1506 | 1506 |
zig(v); |
1507 | 1507 |
} else { |
1508 | 1508 |
zig(v); |
1509 | 1509 |
zag(v); |
1510 | 1510 |
} |
1511 | 1511 |
} |
1512 | 1512 |
} else { |
1513 | 1513 |
if (_parent[_parent[v]] == INVALID) { |
1514 | 1514 |
zag(v); |
1515 | 1515 |
} else { |
1516 | 1516 |
if (_parent[v] == _left[_parent[_parent[v]]]) { |
1517 | 1517 |
zag(v); |
1518 | 1518 |
zig(v); |
1519 | 1519 |
} else { |
1520 | 1520 |
zag(_parent[v]); |
1521 | 1521 |
zag(v); |
1522 | 1522 |
} |
1523 | 1523 |
} |
1524 | 1524 |
} |
1525 | 1525 |
} |
1526 | 1526 |
_head[_g.source(v)] = v; |
1527 | 1527 |
} |
1528 | 1528 |
|
1529 | 1529 |
|
1530 | 1530 |
public: |
1531 | 1531 |
|
1532 | 1532 |
///Find an arc between two nodes. |
1533 | 1533 |
|
1534 | 1534 |
///Find an arc between two nodes. |
1535 | 1535 |
///\param s The source node. |
1536 | 1536 |
///\param t The target node. |
1537 | 1537 |
///\param p The previous arc between \c s and \c t. If it is INVALID or
1538 | 1538 |
///not given, the operator finds the first appropriate arc. |
1539 | 1539 |
///\return An arc from \c s to \c t after \c p or |
1540 | 1540 |
///\ref INVALID if there is no more. |
1541 | 1541 |
/// |
1542 | 1542 |
///For example, you can count the number of arcs from \c u to \c v in the |
1543 | 1543 |
///following way. |
1544 | 1544 |
///\code |
1545 | 1545 |
///DynArcLookUp<ListDigraph> ae(g); |
1546 | 1546 |
///... |
1547 | 1547 |
///int n = 0; |
1548 | 1548 |
///for(Arc a = ae(u,v); a != INVALID; a = ae(u,v,a)) n++; |
1549 | 1549 |
///\endcode |
1550 | 1550 |
/// |
1551 | 1551 |
///Finding an arc takes at most <em>O</em>(log<em>d</em>)
1552 | 1552 |
///amortized time; more precisely, the time complexity of the lookups
1553 | 1553 |
///matches that of the optimal search tree implementation for the
1554 | 1554 |
///current query distribution, up to a constant factor.
1555 | 1555 |
/// |
1556 | 1556 |
///\note This is a dynamic data structure, therefore the data |
1557 | 1557 |
///structure is updated after each graph alteration. Thus although |
1558 | 1558 |
///this data structure is theoretically faster than \ref ArcLookUp |
1559 | 1559 |
///and \ref AllArcLookUp, it often provides worse performance than |
1560 | 1560 |
///them. |
1561 | 1561 |
Arc operator()(Node s, Node t, Arc p = INVALID) const { |
1562 | 1562 |
if (p == INVALID) { |
1563 | 1563 |
Arc a = _head[s]; |
1564 | 1564 |
if (a == INVALID) return INVALID; |
1565 | 1565 |
Arc r = INVALID; |
1566 | 1566 |
while (true) { |
1567 | 1567 |
if (_g.target(a) < t) { |
1568 | 1568 |
if (_right[a] == INVALID) { |
1569 | 1569 |
const_cast<DynArcLookUp&>(*this).splay(a); |
1570 | 1570 |
return r; |
1571 | 1571 |
} else { |
1572 | 1572 |
a = _right[a]; |
1573 | 1573 |
} |
1574 | 1574 |
} else { |
1575 | 1575 |
if (_g.target(a) == t) { |
1576 | 1576 |
r = a; |
1577 | 1577 |
} |
1578 | 1578 |
if (_left[a] == INVALID) { |
1579 | 1579 |
const_cast<DynArcLookUp&>(*this).splay(a); |
1580 | 1580 |
return r; |
1581 | 1581 |
} else { |
1582 | 1582 |
a = _left[a]; |
1583 | 1583 |
} |
1584 | 1584 |
} |
1585 | 1585 |
} |
1586 | 1586 |
} else { |
1587 | 1587 |
Arc a = p; |
1588 | 1588 |
if (_right[a] != INVALID) { |
1589 | 1589 |
a = _right[a]; |
1590 | 1590 |
while (_left[a] != INVALID) { |
1591 | 1591 |
a = _left[a]; |
1592 | 1592 |
} |
1593 | 1593 |
const_cast<DynArcLookUp&>(*this).splay(a); |
1594 | 1594 |
} else { |
1595 | 1595 |
while (_parent[a] != INVALID && _right[_parent[a]] == a) { |
1596 | 1596 |
a = _parent[a]; |
1597 | 1597 |
} |
1598 | 1598 |
if (_parent[a] == INVALID) { |
1599 | 1599 |
return INVALID; |
1600 | 1600 |
} else { |
1601 | 1601 |
a = _parent[a]; |
1602 | 1602 |
const_cast<DynArcLookUp&>(*this).splay(a); |
1603 | 1603 |
} |
1604 | 1604 |
} |
1605 | 1605 |
if (_g.target(a) == t) return a; |
1606 | 1606 |
else return INVALID; |
1607 | 1607 |
} |
1608 | 1608 |
} |
1609 | 1609 |
|
1610 | 1610 |
}; |
1611 | 1611 |
|
1612 | 1612 |
///Fast arc look-up between given endpoints. |
1613 | 1613 |
|
1614 | 1614 |
///Using this class, you can find an arc in a digraph from a given |
1615 | 1615 |
///source to a given target in time <em>O</em>(log<em>d</em>), |
1616 | 1616 |
///where <em>d</em> is the out-degree of the source node. |
1617 | 1617 |
/// |
1618 | 1618 |
///It is not possible to find \e all parallel arcs between two nodes. |
1619 | 1619 |
///Use \ref AllArcLookUp for this purpose. |
1620 | 1620 |
/// |
1621 | 1621 |
///\warning This class is static, so you should call refresh() (or at |
1622 | 1622 |
///least refresh(Node)) to refresh this data structure whenever the |
1623 | 1623 |
///digraph changes. This is time consuming (superlinear,
1624 | 1624 |
///<em>O</em>(<em>m</em> log<em>m</em>), in the number of arcs).
1625 | 1625 |
/// |
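///A minimal usage sketch (the digraph \c g and its nodes \c u and \c v
///are illustrative assumptions):
///\code
///  ArcLookUp<ListDigraph> lookup(g);  // builds the search database
///  Arc a = lookup(u, v);              // O(log d) query
///  g.addArc(u, v);
///  lookup.refresh(u);                 // rebuild the database of node u
///  a = lookup(u, v);
///\endcode
///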
1626 | 1626 |
///\tparam GR The type of the underlying digraph. |
1627 | 1627 |
/// |
1628 | 1628 |
///\sa DynArcLookUp |
1629 | 1629 |
///\sa AllArcLookUp |
1630 | 1630 |
template<class GR> |
1631 | 1631 |
class ArcLookUp |
1632 | 1632 |
{ |
1633 | 1633 |
public: |
1634 | 1634 |
TEMPLATE_DIGRAPH_TYPEDEFS(GR); |
1635 | 1635 |
typedef GR Digraph; |
1636 | 1636 |
|
1637 | 1637 |
protected: |
1638 | 1638 |
const Digraph &_g; |
1639 | 1639 |
typename Digraph::template NodeMap<Arc> _head; |
1640 | 1640 |
typename Digraph::template ArcMap<Arc> _left; |
1641 | 1641 |
typename Digraph::template ArcMap<Arc> _right; |
1642 | 1642 |
|
1643 | 1643 |
class ArcLess { |
1644 | 1644 |
const Digraph &g; |
1645 | 1645 |
public: |
1646 | 1646 |
ArcLess(const Digraph &_g) : g(_g) {} |
1647 | 1647 |
bool operator()(Arc a,Arc b) const |
1648 | 1648 |
{ |
1649 | 1649 |
return g.target(a)<g.target(b); |
1650 | 1650 |
} |
1651 | 1651 |
}; |
1652 | 1652 |
|
1653 | 1653 |
public: |
1654 | 1654 |
|
1655 | 1655 |
///Constructor |
1656 | 1656 |
|
1657 | 1657 |
///Constructor. |
1658 | 1658 |
/// |
1659 | 1659 |
///It builds up the search database, which remains valid until the digraph |
1660 | 1660 |
///changes. |
1661 | 1661 |
ArcLookUp(const Digraph &g) :_g(g),_head(g),_left(g),_right(g) {refresh();} |
1662 | 1662 |
|
1663 | 1663 |
private: |
1664 | 1664 |
Arc refreshRec(std::vector<Arc> &v,int a,int b) |
1665 | 1665 |
{ |
1666 | 1666 |
int m=(a+b)/2; |
1667 | 1667 |
Arc me=v[m]; |
1668 | 1668 |
_left[me] = a<m?refreshRec(v,a,m-1):INVALID; |
1669 | 1669 |
_right[me] = m<b?refreshRec(v,m+1,b):INVALID; |
1670 | 1670 |
return me; |
1671 | 1671 |
} |
1672 | 1672 |
public: |
1673 | 1673 |
///Refresh the search data structure at a node. |
1674 | 1674 |
|
1675 | 1675 |
///Build up the search database of node \c n. |
1676 | 1676 |
/// |
1677 | 1677 |
///It runs in time <em>O</em>(<em>d</em> log<em>d</em>), where <em>d</em> |
1678 | 1678 |
///is the number of the outgoing arcs of \c n. |
1679 | 1679 |
void refresh(Node n) |
1680 | 1680 |
{ |
1681 | 1681 |
std::vector<Arc> v; |
1682 | 1682 |
for(OutArcIt e(_g,n);e!=INVALID;++e) v.push_back(e); |
1683 | 1683 |
if(v.size()) { |
1684 | 1684 |
std::sort(v.begin(),v.end(),ArcLess(_g)); |
1685 | 1685 |
_head[n]=refreshRec(v,0,v.size()-1); |
1686 | 1686 |
} |
1687 | 1687 |
else _head[n]=INVALID; |
1688 | 1688 |
} |
1689 | 1689 |
///Refresh the full data structure. |
1690 | 1690 |
|
1691 | 1691 |
///Build up the full search database. In fact, it simply calls |
1692 | 1692 |
///\ref refresh(Node) "refresh(n)" for each node \c n. |
1693 | 1693 |
/// |
1694 | 1694 |
///It runs in time <em>O</em>(<em>m</em> log<em>D</em>), where <em>m</em> is |
1695 | 1695 |
///the number of the arcs in the digraph and <em>D</em> is the maximum |
1696 | 1696 |
///out-degree of the digraph. |
1697 | 1697 |
void refresh() |
1698 | 1698 |
{ |
1699 | 1699 |
for(NodeIt n(_g);n!=INVALID;++n) refresh(n); |
1700 | 1700 |
} |
1701 | 1701 |
|
1702 | 1702 |
///Find an arc between two nodes. |
1703 | 1703 |
|
1704 | 1704 |
///Find an arc between two nodes in time <em>O</em>(log<em>d</em>), |
1705 | 1705 |
///where <em>d</em> is the number of outgoing arcs of \c s. |
1706 | 1706 |
///\param s The source node. |
1707 | 1707 |
///\param t The target node. |
1708 | 1708 |
///\return An arc from \c s to \c t if one exists,
1709 | 1709 |
///\ref INVALID otherwise. |
1710 | 1710 |
/// |
1711 | 1711 |
///\warning If you change the digraph, refresh() must be called before using |
1712 | 1712 |
///this operator. If you change the outgoing arcs of |
1713 | 1713 |
///a single node \c n, then \ref refresh(Node) "refresh(n)" is enough. |
1714 | 1714 |
Arc operator()(Node s, Node t) const |
1715 | 1715 |
{ |
1716 | 1716 |
Arc e; |
1717 | 1717 |
for(e=_head[s]; |
1718 | 1718 |
e!=INVALID&&_g.target(e)!=t; |
1719 | 1719 |
e = t < _g.target(e)?_left[e]:_right[e]) ; |
1720 | 1720 |
return e; |
1721 | 1721 |
} |
1722 | 1722 |
|
1723 | 1723 |
}; |
1724 | 1724 |
|
1725 | 1725 |
///Fast look-up of all arcs between given endpoints. |
1726 | 1726 |
|
1727 | 1727 |
///This class is the same as \ref ArcLookUp, with the addition |
1728 | 1728 |
///that it makes it possible to find all parallel arcs between given |
1729 | 1729 |
///endpoints. |
1730 | 1730 |
/// |
1731 | 1731 |
///\warning This class is static, so you should call refresh() (or at |
1732 | 1732 |
///least refresh(Node)) to refresh this data structure whenever the |
1733 | 1733 |
///digraph changes. This is time consuming (superlinear,
1734 | 1734 |
///<em>O</em>(<em>m</em> log<em>m</em>), in the number of arcs).
1735 | 1735 |
/// |
1736 | 1736 |
///\tparam GR The type of the underlying digraph. |
1737 | 1737 |
/// |
1738 | 1738 |
///\sa DynArcLookUp |
1739 | 1739 |
///\sa ArcLookUp |
1740 | 1740 |
template<class GR> |
1741 | 1741 |
class AllArcLookUp : public ArcLookUp<GR> |
1742 | 1742 |
{ |
1743 | 1743 |
using ArcLookUp<GR>::_g; |
1744 | 1744 |
using ArcLookUp<GR>::_right; |
1745 | 1745 |
using ArcLookUp<GR>::_left; |
1746 | 1746 |
using ArcLookUp<GR>::_head; |
1747 | 1747 |
|
1748 | 1748 |
TEMPLATE_DIGRAPH_TYPEDEFS(GR); |
1749 | 1749 |
typedef GR Digraph; |
1750 | 1750 |
|
1751 | 1751 |
typename Digraph::template ArcMap<Arc> _next; |
1752 | 1752 |
|
1753 | 1753 |
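// _next[a] caches the next parallel arc (same source and target) after
// arc a, or INVALID if there is none; refreshNext() rebuilds these
// chains by traversing the search tree rooted at head.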
Arc refreshNext(Arc head,Arc next=INVALID) |
1754 | 1754 |
{ |
1755 | 1755 |
if(head==INVALID) return next; |
1756 | 1756 |
else { |
1757 | 1757 |
next=refreshNext(_right[head],next); |
1758 | 1758 |
_next[head]=( next!=INVALID && _g.target(next)==_g.target(head)) |
1759 | 1759 |
? next : INVALID; |
1760 | 1760 |
return refreshNext(_left[head],head); |
1761 | 1761 |
} |
1762 | 1762 |
} |
1763 | 1763 |
|
1764 | 1764 |
void refreshNext() |
1765 | 1765 |
{ |
1766 | 1766 |
for(NodeIt n(_g);n!=INVALID;++n) refreshNext(_head[n]); |
1767 | 1767 |
} |
1768 | 1768 |
|
1769 | 1769 |
public: |
1770 | 1770 |
///Constructor |
1771 | 1771 |
|
1772 | 1772 |
///Constructor. |
1773 | 1773 |
/// |
1774 | 1774 |
///It builds up the search database, which remains valid until the digraph |
1775 | 1775 |
///changes. |
1776 | 1776 |
AllArcLookUp(const Digraph &g) : ArcLookUp<GR>(g), _next(g) {refreshNext();} |
1777 | 1777 |
|
1778 | 1778 |
///Refresh the data structure at a node. |
1779 | 1779 |
|
1780 | 1780 |
///Build up the search database of node \c n. |
1781 | 1781 |
/// |
1782 | 1782 |
///It runs in time <em>O</em>(<em>d</em> log<em>d</em>), where <em>d</em> is |
1783 | 1783 |
///the number of the outgoing arcs of \c n. |
1784 | 1784 |
void refresh(Node n) |
1785 | 1785 |
{ |
1786 | 1786 |
ArcLookUp<GR>::refresh(n); |
1787 | 1787 |
refreshNext(_head[n]); |
1788 | 1788 |
} |
1789 | 1789 |
|
1790 | 1790 |
///Refresh the full data structure. |
1791 | 1791 |
|
1792 | 1792 |
///Build up the full search database. In fact, it simply calls |
1793 | 1793 |
///\ref refresh(Node) "refresh(n)" for each node \c n. |
1794 | 1794 |
/// |
1795 | 1795 |
///It runs in time <em>O</em>(<em>m</em> log<em>D</em>), where <em>m</em> is |
1796 | 1796 |
///the number of the arcs in the digraph and <em>D</em> is the maximum |
1797 | 1797 |
///out-degree of the digraph. |
1798 | 1798 |
void refresh() |
1799 | 1799 |
{ |
1800 | 1800 |
for(NodeIt n(_g);n!=INVALID;++n) refresh(n);
1801 | 1801 |
} |
1802 | 1802 |
|
1803 | 1803 |
///Find an arc between two nodes. |
1804 | 1804 |
|
1805 | 1805 |
///Find an arc between two nodes. |
1806 | 1806 |
///\param s The source node. |
1807 | 1807 |
///\param t The target node. |
1808 | 1808 |
///\param prev The previous arc between \c s and \c t. If it is INVALID or
1809 | 1809 |
///not given, the operator finds the first appropriate arc. |
1810 | 1810 |
///\return An arc from \c s to \c t after \c prev or |
1811 | 1811 |
///\ref INVALID if there is no more. |
1812 | 1812 |
/// |
1813 | 1813 |
///For example, you can count the number of arcs from \c u to \c v in the |
1814 | 1814 |
///following way. |
1815 | 1815 |
///\code |
1816 | 1816 |
///AllArcLookUp<ListDigraph> ae(g); |
1817 | 1817 |
///... |
1818 | 1818 |
///int n = 0; |
1819 | 1819 |
///for(Arc a = ae(u,v); a != INVALID; a=ae(u,v,a)) n++; |
1820 | 1820 |
///\endcode |
1821 | 1821 |
/// |
1822 | 1822 |
///Finding the first arc takes <em>O</em>(log<em>d</em>) time,
1823 | 1823 |
///where <em>d</em> is the number of outgoing arcs of \c s. Then the |
1824 | 1824 |
///consecutive arcs are found in constant time. |
1825 | 1825 |
/// |
1826 | 1826 |
///\warning If you change the digraph, refresh() must be called before using |
1827 | 1827 |
///this operator. If you change the outgoing arcs of |
1828 | 1828 |
///a single node \c n, then \ref refresh(Node) "refresh(n)" is enough. |
1829 | 1829 |
/// |
1830 | 1830 |
#ifdef DOXYGEN |
1831 | 1831 |
Arc operator()(Node s, Node t, Arc prev=INVALID) const {} |
1832 | 1832 |
#else |
1833 | 1833 |
using ArcLookUp<GR>::operator() ; |
1834 | 1834 |
Arc operator()(Node s, Node t, Arc prev) const |
1835 | 1835 |
{ |
1836 | 1836 |
return prev==INVALID?(*this)(s,t):_next[prev]; |
1837 | 1837 |
} |
1838 | 1838 |
#endif |
1839 | 1839 |
|
1840 | 1840 |
}; |
1841 | 1841 |
|
1842 | 1842 |
/// @} |
1843 | 1843 |
|
1844 | 1844 |
} //namespace lemon |
1845 | 1845 |
|
1846 | 1846 |
#endif |
1 | 1 |
/* -*- mode: C++; indent-tabs-mode: nil; -*- |
2 | 2 |
* |
3 | 3 |
* This file is a part of LEMON, a generic C++ optimization library. |
4 | 4 |
* |
5 | 5 |
* Copyright (C) 2003-2009 |
6 | 6 |
* Egervary Jeno Kombinatorikus Optimalizalasi Kutatocsoport |
7 | 7 |
* (Egervary Research Group on Combinatorial Optimization, EGRES). |
8 | 8 |
* |
9 | 9 |
* Permission to use, modify and distribute this software is granted |
10 | 10 |
* provided that this copyright notice appears in all copies. For |
11 | 11 |
* precise terms see the accompanying LICENSE file. |
12 | 12 |
* |
13 | 13 |
* This software is provided "AS IS" with no warranty of any kind, |
14 | 14 |
* express or implied, and with no claim as to its suitability for any |
15 | 15 |
* purpose. |
16 | 16 |
* |
17 | 17 |
*/ |
18 | 18 |
|
19 | 19 |
#ifndef LEMON_ELEVATOR_H |
20 | 20 |
#define LEMON_ELEVATOR_H |
21 | 21 |
|
22 | 22 |
///\ingroup auxdat |
23 | 23 |
///\file |
24 | 24 |
///\brief Elevator class |
25 | 25 |
/// |
26 | 26 |
///Elevator class implements an efficient data structure |
27 | 27 |
///for labeling items in push-relabel type algorithms. |
28 | 28 |
/// |
29 | 29 |
|
30 | 30 |
#include <lemon/core.h> |
31 | 31 |
#include <lemon/bits/traits.h> |
32 | 32 |
|
33 | 33 |
namespace lemon { |
34 | 34 |
|
35 | 35 |
///Class for handling "labels" in push-relabel type algorithms. |
36 | 36 |
|
37 | 37 |
///A class for handling "labels" in push-relabel type algorithms. |
38 | 38 |
/// |
39 | 39 |
///\ingroup auxdat |
40 | 40 |
///Using this class you can assign "labels" (nonnegative integer numbers) |
41 | 41 |
///to the edges or nodes of a graph, manipulate and query them through |
42 | 42 |
///operations typically arising in "push-relabel" type algorithms. |
43 | 43 |
/// |
44 | 44 |
///Each item is either \em active or not, and you can also choose a |
45 | 45 |
///highest level active item. |
46 | 46 |
/// |
47 | 47 |
///\sa LinkedElevator |
48 | 48 |
/// |
49 | 49 |
///\param GR Type of the underlying graph. |
50 | 50 |
///\param Item Type of the items the data is assigned to (\c GR::Node, |
51 | 51 |
///\c GR::Arc or \c GR::Edge). |
52 | 52 |
template<class GR, class Item> |
53 | 53 |
class Elevator |
54 | 54 |
{ |
55 | 55 |
public: |
56 | 56 |
|
57 | 57 |
typedef Item Key; |
58 | 58 |
typedef int Value; |
59 | 59 |
|
60 | 60 |
private: |
61 | 61 |
|
62 | 62 |
typedef Item *Vit; |
63 | 63 |
typedef typename ItemSetTraits<GR,Item>::template Map<Vit>::Type VitMap; |
64 | 64 |
typedef typename ItemSetTraits<GR,Item>::template Map<int>::Type IntMap; |
65 | 65 |
|
66 | 66 |
const GR &_g; |
67 | 67 |
int _max_level; |
68 | 68 |
int _item_num; |
69 | 69 |
VitMap _where; |
70 | 70 |
IntMap _level; |
71 | 71 |
std::vector<Item> _items; |
72 | 72 |
std::vector<Vit> _first; |
73 | 73 |
std::vector<Vit> _last_active; |
74 | 74 |
|
75 | 75 |
int _highest_active; |
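// Internal layout: the items are stored in _items grouped by level;
// _first[l] points to the first item of level l and _last_active[l] to
// the last active item of that level (active items are kept at the
// front of their level's block). _where[i] is the position of item i
// within _items.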
76 | 76 |
|
77 | 77 |
void copy(Item i, Vit p) |
78 | 78 |
{ |
_where[*p=i] = p;
80 | 80 |
} |
81 | 81 |
void copy(Vit s, Vit p) |
82 | 82 |
{ |
83 | 83 |
if(s!=p) |
84 | 84 |
{ |
85 | 85 |
Item i=*s; |
86 | 86 |
*p=i; |
_where[i] = p;
88 | 88 |
} |
89 | 89 |
} |
90 | 90 |
void swap(Vit i, Vit j) |
91 | 91 |
{ |
92 | 92 |
Item ti=*i; |
93 | 93 |
Vit ct = _where[ti]; |
_where[ti] = _where[*i=*j];
_where[*j] = ct;
96 | 96 |
*j=ti; |
97 | 97 |
} |
98 | 98 |
|
99 | 99 |
public: |
100 | 100 |
|
101 | 101 |
///Constructor with given maximum level. |
102 | 102 |
|
103 | 103 |
///Constructor with given maximum level. |
104 | 104 |
/// |
105 | 105 |
///\param graph The underlying graph. |
106 | 106 |
///\param max_level The maximum allowed level. |
107 | 107 |
///Set the range of the possible labels to <tt>[0..max_level]</tt>. |
108 | 108 |
Elevator(const GR &graph,int max_level) : |
109 | 109 |
_g(graph), |
110 | 110 |
_max_level(max_level), |
111 | 111 |
_item_num(_max_level), |
112 | 112 |
_where(graph), |
113 | 113 |
_level(graph,0), |
114 | 114 |
_items(_max_level), |
115 | 115 |
_first(_max_level+2), |
116 | 116 |
_last_active(_max_level+2), |
117 | 117 |
_highest_active(-1) {} |
118 | 118 |
///Constructor. |
119 | 119 |
|
120 | 120 |
///Constructor. |
121 | 121 |
/// |
122 | 122 |
///\param graph The underlying graph. |
123 | 123 |
///Set the range of the possible labels to <tt>[0..max_level]</tt>, |
124 | 124 |
///where \c max_level is equal to the number of labeled items in the graph. |
125 | 125 |
Elevator(const GR &graph) : |
126 | 126 |
_g(graph), |
127 | 127 |
_max_level(countItems<GR, Item>(graph)), |
128 | 128 |
_item_num(_max_level), |
129 | 129 |
_where(graph), |
130 | 130 |
_level(graph,0), |
131 | 131 |
_items(_max_level), |
132 | 132 |
_first(_max_level+2), |
133 | 133 |
_last_active(_max_level+2), |
134 | 134 |
_highest_active(-1) |
135 | 135 |
{ |
136 | 136 |
} |
137 | 137 |
|
138 | 138 |
///Activate item \c i. |
139 | 139 |
|
140 | 140 |
///Activate item \c i. |
141 | 141 |
///\pre Item \c i shouldn't be active before. |
142 | 142 |
void activate(Item i) |
143 | 143 |
{ |
144 | 144 |
const int l=_level[i]; |
145 | 145 |
swap(_where[i],++_last_active[l]); |
146 | 146 |
if(l>_highest_active) _highest_active=l; |
147 | 147 |
} |
148 | 148 |
|
149 | 149 |
///Deactivate item \c i. |
150 | 150 |
|
151 | 151 |
///Deactivate item \c i. |
152 | 152 |
///\pre Item \c i must be active before. |
153 | 153 |
void deactivate(Item i) |
154 | 154 |
{ |
155 | 155 |
swap(_where[i],_last_active[_level[i]]--); |
156 | 156 |
while(_highest_active>=0 && |
157 | 157 |
_last_active[_highest_active]<_first[_highest_active]) |
158 | 158 |
_highest_active--; |
159 | 159 |
} |
160 | 160 |
|
161 | 161 |
///Query whether item \c i is active |
162 | 162 |
bool active(Item i) const { return _where[i]<=_last_active[_level[i]]; } |
163 | 163 |
|
164 | 164 |
///Return the level of item \c i. |
165 | 165 |
int operator[](Item i) const { return _level[i]; } |
166 | 166 |
|
167 | 167 |
///Return the number of items on level \c l. |
168 | 168 |
int onLevel(int l) const |
169 | 169 |
{ |
170 | 170 |
return _first[l+1]-_first[l]; |
171 | 171 |
} |
172 | 172 |
///Return true if level \c l is empty. |
173 | 173 |
bool emptyLevel(int l) const |
174 | 174 |
{ |
175 | 175 |
return _first[l+1]-_first[l]==0; |
176 | 176 |
} |
177 | 177 |
///Return the number of items above level \c l. |
178 | 178 |
int aboveLevel(int l) const |
179 | 179 |
{ |
180 | 180 |
return _first[_max_level+1]-_first[l+1]; |
181 | 181 |
} |
182 | 182 |
///Return the number of active items on level \c l. |
183 | 183 |
int activesOnLevel(int l) const |
184 | 184 |
{ |
185 | 185 |
return _last_active[l]-_first[l]+1; |
186 | 186 |
} |
187 | 187 |
///Return true if there is no active item on level \c l. |
188 | 188 |
bool activeFree(int l) const |
189 | 189 |
{ |
190 | 190 |
return _last_active[l]<_first[l]; |
191 | 191 |
} |
192 | 192 |
///Return the maximum allowed level. |
193 | 193 |
int maxLevel() const |
194 | 194 |
{ |
195 | 195 |
return _max_level; |
196 | 196 |
} |
197 | 197 |
|
198 | 198 |
///\name Highest Active Item |
199 | 199 |
///Functions for working with the highest level |
200 | 200 |
///active item. |
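///
///A rough sketch of how a push-relabel main loop typically drives these
///functions (\c process() is a placeholder for the actual push/relabel
///step, which must eventually deactivate or lift the chosen item):
///\code
///  while(el.highestActiveLevel() >= 0) {
///    Item i = el.highestActive();
///    process(i);
///  }
///\endcode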
201 | 201 |
|
202 | 202 |
///@{ |
203 | 203 |
|
204 | 204 |
///Return a highest level active item. |
205 | 205 |
|
206 | 206 |
///Return a highest level active item or INVALID if there is no active |
207 | 207 |
///item. |
208 | 208 |
Item highestActive() const |
209 | 209 |
{ |
210 | 210 |
return _highest_active>=0?*_last_active[_highest_active]:INVALID; |
211 | 211 |
} |
212 | 212 |
|
213 | 213 |
///Return the highest active level. |
214 | 214 |
|
215 | 215 |
///Return the level of the highest active item or -1 if there is no active |
216 | 216 |
///item. |
217 | 217 |
int highestActiveLevel() const |
218 | 218 |
{ |
219 | 219 |
return _highest_active; |
220 | 220 |
} |
221 | 221 |
|
222 | 222 |
///Lift the highest active item by one. |
223 | 223 |
|
224 | 224 |
///Lift the item returned by highestActive() by one. |
225 | 225 |
/// |
226 | 226 |
void liftHighestActive() |
227 | 227 |
{ |
228 | 228 |
Item it = *_last_active[_highest_active]; |
++_level[it];
230 | 230 |
swap(_last_active[_highest_active]--,_last_active[_highest_active+1]); |
231 | 231 |
--_first[++_highest_active]; |
232 | 232 |
} |
233 | 233 |
|
234 | 234 |
///Lift the highest active item to the given level. |
235 | 235 |
|
236 | 236 |
///Lift the item returned by highestActive() to level \c new_level. |
237 | 237 |
/// |
238 | 238 |
///\warning \c new_level must be strictly higher |
239 | 239 |
///than the current level. |
240 | 240 |
/// |
241 | 241 |
void liftHighestActive(int new_level) |
242 | 242 |
{ |
243 | 243 |
const Item li = *_last_active[_highest_active]; |
244 | 244 |
|
245 | 245 |
copy(--_first[_highest_active+1],_last_active[_highest_active]--); |
246 | 246 |
for(int l=_highest_active+1;l<new_level;l++) |
247 | 247 |
{ |
248 | 248 |
copy(--_first[l+1],_first[l]); |
249 | 249 |
--_last_active[l]; |
250 | 250 |
} |
251 | 251 |
copy(li,_first[new_level]); |
_level[li] = new_level;
253 | 253 |
_highest_active=new_level; |
254 | 254 |
} |
255 | 255 |
|
256 | 256 |
///Lift the highest active item to the top level. |
257 | 257 |
|
258 | 258 |
///Lift the item returned by highestActive() to the top level and |
259 | 259 |
///deactivate it. |
260 | 260 |
void liftHighestActiveToTop() |
261 | 261 |
{ |
262 | 262 |
const Item li = *_last_active[_highest_active]; |
263 | 263 |
|
264 | 264 |
copy(--_first[_highest_active+1],_last_active[_highest_active]--); |
265 | 265 |
for(int l=_highest_active+1;l<_max_level;l++) |
266 | 266 |
{ |
267 | 267 |
copy(--_first[l+1],_first[l]); |
268 | 268 |
--_last_active[l]; |
269 | 269 |
} |
270 | 270 |
copy(li,_first[_max_level]); |
271 | 271 |
--_last_active[_max_level]; |
_level[li] = _max_level;
273 | 273 |
|
274 | 274 |
while(_highest_active>=0 && |
275 | 275 |
_last_active[_highest_active]<_first[_highest_active]) |
276 | 276 |
_highest_active--; |
277 | 277 |
} |
278 | 278 |
|
279 | 279 |
///@} |
280 | 280 |
|
281 | 281 |
///\name Active Item on Certain Level |
282 | 282 |
///Functions for working with the active items. |
283 | 283 |
|
284 | 284 |
///@{ |
285 | 285 |
|
286 | 286 |
///Return an active item on level \c l. |
287 | 287 |
|
288 | 288 |
///Return an active item on level \c l or \ref INVALID if there is no such |
289 | 289 |
///item. (\c l must be from the range [0..\c max_level].)
290 | 290 |
Item activeOn(int l) const |
291 | 291 |
{ |
292 | 292 |
return _last_active[l]>=_first[l]?*_last_active[l]:INVALID; |
293 | 293 |
} |
294 | 294 |
|
295 | 295 |
///Lift the active item returned by \c activeOn(level) by one. |
296 | 296 |
|
297 | 297 |
///Lift the active item returned by \ref activeOn() "activeOn(level)" |
298 | 298 |
///by one. |
299 | 299 |
void liftActiveOn(int level)
300 | 300 |
{ |
301 | 301 |
Item it =*_last_active[level]; |
++_level[it];
303 | 303 |
swap(_last_active[level]--, --_first[level+1]); |
304 | 304 |
if (level+1>_highest_active) ++_highest_active; |
305 | 305 |
} |
306 | 306 |
|
307 | 307 |
///Lift the active item returned by \c activeOn(level) to the given level. |
308 | 308 |
|
309 | 309 |
///Lift the active item returned by \ref activeOn() "activeOn(level)" |
310 | 310 |
///to the given level. |
311 | 311 |
void liftActiveOn(int level, int new_level) |
312 | 312 |
{ |
313 | 313 |
const Item ai = *_last_active[level]; |
314 | 314 |
|
315 | 315 |
copy(--_first[level+1], _last_active[level]--); |
316 | 316 |
for(int l=level+1;l<new_level;l++) |
317 | 317 |
{ |
318 | 318 |
copy(_last_active[l],_first[l]); |
319 | 319 |
copy(--_first[l+1], _last_active[l]--); |
320 | 320 |
} |
321 | 321 |
copy(ai,_first[new_level]); |
_level[ai] = new_level;
323 | 323 |
if (new_level>_highest_active) _highest_active=new_level; |
324 | 324 |
} |
325 | 325 |
|
326 | 326 |
///Lift the active item returned by \c activeOn(level) to the top level. |
327 | 327 |
|
328 | 328 |
///Lift the active item returned by \ref activeOn() "activeOn(level)" |
329 | 329 |
///to the top level and deactivate it. |
330 | 330 |
void liftActiveToTop(int level) |
331 | 331 |
{ |
332 | 332 |
const Item ai = *_last_active[level]; |
333 | 333 |
|
334 | 334 |
copy(--_first[level+1],_last_active[level]--); |
335 | 335 |
for(int l=level+1;l<_max_level;l++) |
336 | 336 |
{ |
337 | 337 |
copy(_last_active[l],_first[l]); |
338 | 338 |
copy(--_first[l+1], _last_active[l]--); |
339 | 339 |
} |
340 | 340 |
copy(ai,_first[_max_level]); |
341 | 341 |
--_last_active[_max_level]; |
_level[ai] = _max_level;
343 | 343 |
|
344 | 344 |
if (_highest_active==level) { |
345 | 345 |
while(_highest_active>=0 && |
346 | 346 |
_last_active[_highest_active]<_first[_highest_active]) |
347 | 347 |
_highest_active--; |
348 | 348 |
} |
349 | 349 |
} |
350 | 350 |
|
351 | 351 |
///@} |
352 | 352 |
|
353 | 353 |
///Lift an active item to a higher level. |
354 | 354 |
|
355 | 355 |
///Lift an active item to a higher level. |
356 | 356 |
///\param i The item to be lifted. It must be active. |
357 | 357 |
///\param new_level The new level of \c i. It must be strictly higher |
358 | 358 |
///than the current level. |
359 | 359 |
/// |
360 | 360 |
void lift(Item i, int new_level) |
361 | 361 |
{ |
362 | 362 |
const int lo = _level[i]; |
363 | 363 |
const Vit w = _where[i]; |
364 | 364 |
|
365 | 365 |
copy(_last_active[lo],w); |
366 | 366 |
copy(--_first[lo+1],_last_active[lo]--); |
367 | 367 |
for(int l=lo+1;l<new_level;l++) |
368 | 368 |
{ |
369 | 369 |
copy(_last_active[l],_first[l]); |
370 | 370 |
copy(--_first[l+1],_last_active[l]--); |
371 | 371 |
} |
372 | 372 |
copy(i,_first[new_level]); |
_level[i] = new_level;
374 | 374 |
if(new_level>_highest_active) _highest_active=new_level; |
375 | 375 |
} |
376 | 376 |
|
377 | 377 |
///Move an inactive item to the top but one level (in a dirty way). |
378 | 378 |
|
379 | 379 |
///This function moves an inactive item from the top level to the top |
380 | 380 |
///but one level (in a dirty way). |
381 | 381 |
///\warning It makes the underlying datastructure corrupt, so use it |
382 | 382 |
///only if you really know what it is for. |
383 | 383 |
///\pre The item is on the top level. |
384 | 384 |
void dirtyTopButOne(Item i) { |
_level[i] = _max_level - 1;
386 | 386 |
} |
387 | 387 |
|
388 | 388 |
///Lift all items on and above the given level to the top level. |
389 | 389 |
|
390 | 390 |
///This function lifts all items on and above level \c l to the top |
391 | 391 |
///level and deactivates them. |
392 | 392 |
void liftToTop(int l) |
393 | 393 |
{ |
394 | 394 |
const Vit f=_first[l]; |
395 | 395 |
const Vit tl=_first[_max_level]; |
396 | 396 |
for(Vit i=f;i!=tl;++i) |
_level[*i] = _max_level;
398 | 398 |
for(int i=l;i<=_max_level;i++) |
399 | 399 |
{ |
400 | 400 |
_first[i]=f; |
401 | 401 |
_last_active[i]=f-1; |
402 | 402 |
} |
403 | 403 |
for(_highest_active=l-1; |
404 | 404 |
_highest_active>=0 && |
405 | 405 |
_last_active[_highest_active]<_first[_highest_active]; |
406 | 406 |
_highest_active--) ; |
407 | 407 |
} |
408 | 408 |
|
409 | 409 |
private: |
410 | 410 |
int _init_lev; |
411 | 411 |
Vit _init_num; |
412 | 412 |
|
413 | 413 |
public: |
414 | 414 |
|
415 | 415 |
///\name Initialization |
416 | 416 |
///Using these functions you can initialize the levels of the items. |
417 | 417 |
///\n |
418 | 418 |
///The initialization must be started with calling \c initStart(). |
419 | 419 |
///Then the items should be listed level by level starting with the |
420 | 420 |
///lowest one (level 0) using \c initAddItem() and \c initNewLevel(). |
421 | 421 |
///Finally \c initFinish() must be called. |
422 | 422 |
///The items not listed are put on the highest level. |
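///
///For example, putting items \c a and \c b on level 0 and item \c c on
///level 1 (a hypothetical setup, shown only to illustrate the call order):
///\code
///  el.initStart();
///  el.initAddItem(a);
///  el.initAddItem(b);
///  el.initNewLevel();
///  el.initAddItem(c);
///  el.initFinish();
///\endcode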
423 | 423 |
///@{ |
424 | 424 |
|
425 | 425 |
///Start the initialization process. |
426 | 426 |
void initStart() |
427 | 427 |
{ |
428 | 428 |
_init_lev=0; |
429 | 429 |
_init_num=&_items[0]; |
430 | 430 |
_first[0]=&_items[0]; |
431 | 431 |
_last_active[0]=&_items[0]-1; |
432 | 432 |
Vit n=&_items[0]; |
433 | 433 |
for(typename ItemSetTraits<GR,Item>::ItemIt i(_g);i!=INVALID;++i) |
434 | 434 |
{ |
435 | 435 |
*n=i; |
_where[i] = n;
_level[i] = _max_level;
438 | 438 |
++n; |
439 | 439 |
} |
440 | 440 |
} |
441 | 441 |
|
442 | 442 |
///Add an item to the current level. |
443 | 443 |
void initAddItem(Item i) |
444 | 444 |
{ |
445 | 445 |
swap(_where[i],_init_num); |
_level[i] = _init_lev;
447 | 447 |
++_init_num; |
448 | 448 |
} |
449 | 449 |
|
450 | 450 |
///Start a new level. |
451 | 451 |
|
452 | 452 |
///Start a new level. |
453 | 453 |
///It shouldn't be used before the items on level 0 are listed. |
454 | 454 |
void initNewLevel() |
455 | 455 |
{ |
456 | 456 |
_init_lev++; |
457 | 457 |
_first[_init_lev]=_init_num; |
458 | 458 |
_last_active[_init_lev]=_init_num-1; |
459 | 459 |
} |
460 | 460 |
|
461 | 461 |
///Finalize the initialization process. |
462 | 462 |
void initFinish() |
463 | 463 |
{ |
464 | 464 |
for(_init_lev++;_init_lev<=_max_level;_init_lev++) |
465 | 465 |
{ |
466 | 466 |
_first[_init_lev]=_init_num; |
467 | 467 |
_last_active[_init_lev]=_init_num-1; |
468 | 468 |
} |
469 | 469 |
_first[_max_level+1]=&_items[0]+_item_num; |
470 | 470 |
_last_active[_max_level+1]=&_items[0]+_item_num-1; |
471 | 471 |
_highest_active = -1; |
472 | 472 |
} |
473 | 473 |
|
474 | 474 |
///@} |
475 | 475 |
|
476 | 476 |
}; |
477 | 477 |
|
478 | 478 |
///Class for handling "labels" in push-relabel type algorithms. |
479 | 479 |
|
480 | 480 |
///A class for handling "labels" in push-relabel type algorithms. |
481 | 481 |
/// |
482 | 482 |
///\ingroup auxdat |
483 | 483 |
///Using this class you can assign "labels" (nonnegative integer numbers) |
484 | 484 |
///to the edges or nodes of a graph, manipulate and query them through |
485 | 485 |
///operations typically arising in "push-relabel" type algorithms. |
486 | 486 |
/// |
487 | 487 |
///Each item is either \em active or not, and you can also choose a |
488 | 488 |
///highest level active item. |
489 | 489 |
/// |
490 | 490 |
///\sa Elevator |
491 | 491 |
/// |
492 | 492 |
///\param GR Type of the underlying graph. |
493 | 493 |
///\param Item Type of the items the data is assigned to (\c GR::Node, |
494 | 494 |
///\c GR::Arc or \c GR::Edge). |
495 | 495 |
template <class GR, class Item> |
496 | 496 |
class LinkedElevator { |
497 | 497 |
public: |
498 | 498 |
|
499 | 499 |
typedef Item Key; |
500 | 500 |
typedef int Value; |
501 | 501 |
|
502 | 502 |
private: |
503 | 503 |
|
504 | 504 |
typedef typename ItemSetTraits<GR,Item>:: |
505 | 505 |
template Map<Item>::Type ItemMap; |
506 | 506 |
typedef typename ItemSetTraits<GR,Item>:: |
507 | 507 |
template Map<int>::Type IntMap; |
508 | 508 |
typedef typename ItemSetTraits<GR,Item>:: |
509 | 509 |
template Map<bool>::Type BoolMap; |
510 | 510 |
|
511 | 511 |
const GR &_graph; |
512 | 512 |
int _max_level; |
513 | 513 |
int _item_num; |
514 | 514 |
std::vector<Item> _first, _last; |
515 | 515 |
ItemMap _prev, _next; |
516 | 516 |
int _highest_active; |
517 | 517 |
IntMap _level; |
518 | 518 |
BoolMap _active; |
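// Internal layout: the items of each level form a doubly linked list
// (_prev/_next) running from _first[l] to _last[l]; active items are
// kept at the front of their level's list and are also flagged in
// _active.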
519 | 519 |
|
520 | 520 |
public: |
521 | 521 |
///Constructor with given maximum level. |
522 | 522 |
|
523 | 523 |
///Constructor with given maximum level. |
524 | 524 |
/// |
525 | 525 |
///\param graph The underlying graph. |
526 | 526 |
///\param max_level The maximum allowed level. |
527 | 527 |
///Set the range of the possible labels to <tt>[0..max_level]</tt>. |
528 | 528 |
LinkedElevator(const GR& graph, int max_level) |
529 | 529 |
: _graph(graph), _max_level(max_level), _item_num(_max_level), |
530 | 530 |
_first(_max_level + 1), _last(_max_level + 1), |
531 | 531 |
_prev(graph), _next(graph), |
532 | 532 |
_highest_active(-1), _level(graph), _active(graph) {} |
533 | 533 |
|
534 | 534 |
///Constructor. |
535 | 535 |
|
536 | 536 |
///Constructor. |
537 | 537 |
/// |
538 | 538 |
///\param graph The underlying graph. |
539 | 539 |
///Set the range of the possible labels to <tt>[0..max_level]</tt>, |
540 | 540 |
///where \c max_level is equal to the number of labeled items in the graph. |
541 | 541 |
LinkedElevator(const GR& graph) |
542 | 542 |
: _graph(graph), _max_level(countItems<GR, Item>(graph)), |
543 | 543 |
_item_num(_max_level), |
544 | 544 |
_first(_max_level + 1), _last(_max_level + 1), |
545 | 545 |
_prev(graph, INVALID), _next(graph, INVALID), |
546 | 546 |
_highest_active(-1), _level(graph), _active(graph) {} |
547 | 547 |
|
548 | 548 |
|
549 | 549 |
///Activate item \c i. |
550 | 550 |
|
551 | 551 |
///Activate item \c i. |
552 | 552 |
///\pre Item \c i shouldn't be active before. |
553 | 553 |
void activate(Item i) { |
_active[i] = true;
555 | 555 |
|
556 | 556 |
int level = _level[i]; |
557 | 557 |
if (level > _highest_active) { |
558 | 558 |
_highest_active = level; |
559 | 559 |
} |
560 | 560 |
|
561 | 561 |
if (_prev[i] == INVALID || _active[_prev[i]]) return; |
562 | 562 |
//unlace |
_next[_prev[i]] = _next[i];
564 | 564 |
if (_next[i] != INVALID) { |
_prev[_next[i]] = _prev[i];
566 | 566 |
} else { |
567 | 567 |
_last[level] = _prev[i]; |
568 | 568 |
} |
569 | 569 |
//lace |
_next[i] = _first[level];
_prev[_first[level]] = i;
_prev[i] = INVALID;
573 | 573 |
_first[level] = i; |
574 | 574 |
|
575 | 575 |
} |
576 | 576 |
|
577 | 577 |
///Deactivate item \c i. |
578 | 578 |
|
579 | 579 |
///Deactivate item \c i. |
580 | 580 |
///\pre Item \c i must be active before. |
581 | 581 |
void deactivate(Item i) { |
_active[i] = false;
583 | 583 |
int level = _level[i]; |
584 | 584 |
|
585 | 585 |
if (_next[i] == INVALID || !_active[_next[i]]) |
586 | 586 |
goto find_highest_level; |
587 | 587 |
|
588 | 588 |
//unlace |
_prev[_next[i]] = _prev[i];
590 | 590 |
if (_prev[i] != INVALID) { |
_next[_prev[i]] = _next[i];
592 | 592 |
} else { |
593 | 593 |
_first[_level[i]] = _next[i]; |
594 | 594 |
} |
595 | 595 |
//lace |
_prev[i] = _last[level];
_next[_last[level]] = i;
_next[i] = INVALID;
599 | 599 |
_last[level] = i; |
600 | 600 |
|
601 | 601 |
find_highest_level: |
602 | 602 |
if (level == _highest_active) { |
603 | 603 |
while (_highest_active >= 0 && activeFree(_highest_active)) |
604 | 604 |
--_highest_active; |
605 | 605 |
} |
606 | 606 |
} |
607 | 607 |
|
608 | 608 |
///Query whether item \c i is active |
609 | 609 |
bool active(Item i) const { return _active[i]; } |
610 | 610 |
|
611 | 611 |
///Return the level of item \c i. |
612 | 612 |
int operator[](Item i) const { return _level[i]; } |
613 | 613 |
|
614 | 614 |
///Return the number of items on level \c l. |
615 | 615 |
int onLevel(int l) const { |
616 | 616 |
int num = 0; |
617 | 617 |
Item n = _first[l]; |
618 | 618 |
while (n != INVALID) { |
619 | 619 |
++num; |
620 | 620 |
n = _next[n]; |
621 | 621 |
} |
622 | 622 |
return num; |
623 | 623 |
} |
624 | 624 |
|
625 | 625 |
///Return true if the level is empty. |
626 | 626 |
bool emptyLevel(int l) const { |
627 | 627 |
return _first[l] == INVALID; |
628 | 628 |
} |
629 | 629 |
|
630 | 630 |
///Return the number of items above level \c l. |
631 | 631 |
int aboveLevel(int l) const { |
632 | 632 |
int num = 0; |
633 | 633 |
for (int level = l + 1; level < _max_level; ++level) |
634 | 634 |
num += onLevel(level); |
635 | 635 |
return num; |
636 | 636 |
} |
637 | 637 |
|
638 | 638 |
///Return the number of active items on level \c l. |
639 | 639 |
int activesOnLevel(int l) const { |
640 | 640 |
int num = 0; |
641 | 641 |
Item n = _first[l]; |
642 | 642 |
while (n != INVALID && _active[n]) { |
643 | 643 |
++num; |
644 | 644 |
n = _next[n]; |
645 | 645 |
} |
646 | 646 |
return num; |
647 | 647 |
} |
648 | 648 |
|
649 | 649 |
///Return true if there is no active item on level \c l. |
650 | 650 |
bool activeFree(int l) const { |
651 | 651 |
return _first[l] == INVALID || !_active[_first[l]]; |
652 | 652 |
} |
653 | 653 |
|
654 | 654 |
///Return the maximum allowed level. |
655 | 655 |
int maxLevel() const { |
656 | 656 |
return _max_level; |
657 | 657 |
} |
658 | 658 |
|
659 | 659 |
///\name Highest Active Item |
660 | 660 |
///Functions for working with the highest level |
661 | 661 |
///active item. |
662 | 662 |
|
663 | 663 |
///@{ |
664 | 664 |
|
665 | 665 |
///Return a highest level active item. |
666 | 666 |
|
667 | 667 |
///Return a highest level active item or INVALID if there is no active |
668 | 668 |
///item. |
669 | 669 |
Item highestActive() const { |
670 | 670 |
return _highest_active >= 0 ? _first[_highest_active] : INVALID; |
671 | 671 |
} |
672 | 672 |
|
673 | 673 |
///Return the highest active level. |
674 | 674 |
|
675 | 675 |
///Return the level of the highest active item or -1 if there is no active |
676 | 676 |
///item. |
677 | 677 |
int highestActiveLevel() const { |
678 | 678 |
return _highest_active; |
679 | 679 |
} |
680 | 680 |
|
681 | 681 |
///Lift the highest active item by one. |
682 | 682 |
|
683 | 683 |
///Lift the item returned by highestActive() by one. |
684 | 684 |
/// |
685 | 685 |
void liftHighestActive() { |
686 | 686 |
Item i = _first[_highest_active]; |
687 | 687 |
if (_next[i] != INVALID) { |
_prev[_next[i]] = INVALID;
689 | 689 |
_first[_highest_active] = _next[i]; |
690 | 690 |
} else { |
691 | 691 |
_first[_highest_active] = INVALID; |
692 | 692 |
_last[_highest_active] = INVALID; |
693 | 693 |
} |
_level[i] = ++_highest_active;
695 | 695 |
if (_first[_highest_active] == INVALID) { |
696 | 696 |
_first[_highest_active] = i; |
697 | 697 |
_last[_highest_active] = i; |
_prev[i] = INVALID;
_next[i] = INVALID;
700 | 700 |
} else { |
_prev[_first[_highest_active]] = i;
_next[i] = _first[_highest_active];
703 | 703 |
_first[_highest_active] = i; |
704 | 704 |
} |
705 | 705 |
} |
706 | 706 |
|
707 | 707 |
///Lift the highest active item to the given level. |
708 | 708 |
|
709 | 709 |
///Lift the item returned by highestActive() to level \c new_level. |
710 | 710 |
/// |
711 | 711 |
///\warning \c new_level must be strictly higher |
712 | 712 |
///than the current level. |
713 | 713 |
/// |
714 | 714 |
void liftHighestActive(int new_level) { |
715 | 715 |
Item i = _first[_highest_active]; |
716 | 716 |
if (_next[i] != INVALID) { |
_prev[_next[i]] = INVALID;
718 | 718 |
_first[_highest_active] = _next[i]; |
719 | 719 |
} else { |
720 | 720 |
_first[_highest_active] = INVALID; |
721 | 721 |
_last[_highest_active] = INVALID; |
722 | 722 |
} |
_level[i] = _highest_active = new_level;
724 | 724 |
if (_first[_highest_active] == INVALID) { |
725 | 725 |
_first[_highest_active] = _last[_highest_active] = i; |
_prev[i] = INVALID;
_next[i] = INVALID;
728 | 728 |
} else { |
_prev[_first[_highest_active]] = i;
_next[i] = _first[_highest_active];
731 | 731 |
_first[_highest_active] = i; |
732 | 732 |
} |
733 | 733 |
} |
734 | 734 |
|
735 | 735 |
///Lift the highest active item to the top level. |
736 | 736 |
|
737 | 737 |
///Lift the item returned by highestActive() to the top level and |
738 | 738 |
///deactivate it. |
739 | 739 |
void liftHighestActiveToTop() { |
740 | 740 |
Item i = _first[_highest_active]; |
_level[i] = _max_level;
742 | 742 |
if (_next[i] != INVALID) { |
_prev[_next[i]] = INVALID;
744 | 744 |
_first[_highest_active] = _next[i]; |
745 | 745 |
} else { |
746 | 746 |
_first[_highest_active] = INVALID; |
747 | 747 |
_last[_highest_active] = INVALID; |
748 | 748 |
} |
749 | 749 |
while (_highest_active >= 0 && activeFree(_highest_active)) |
750 | 750 |
--_highest_active; |
751 | 751 |
} |
752 | 752 |
|
753 | 753 |
///@} |
754 | 754 |
|
755 | 755 |
///\name Active Item on Certain Level |
756 | 756 |
///Functions for working with the active items. |
757 | 757 |
|
758 | 758 |
///@{ |
759 | 759 |
|
760 | 760 |
///Return an active item on level \c l. |
761 | 761 |
|
762 | 762 |
///Return an active item on level \c l or \ref INVALID if there is no such |
763 | 763 |
///item. (\c l must be in the range [0...\c max_level].) |
764 | 764 |
Item activeOn(int l) const |
765 | 765 |
{ |
766 | 766 |
return _active[_first[l]] ? _first[l] : INVALID; |
767 | 767 |
} |
768 | 768 |
|
769 | 769 |
///Lift the active item returned by \c activeOn(l) by one. |
770 | 770 |
|
771 | 771 |
///Lift the active item returned by \ref activeOn() "activeOn(l)" |
772 | 772 |
///by one. |
773 | 773 |
void liftActiveOn(int l) |
774 | 774 |
{ |
775 | 775 |
Item i = _first[l]; |
776 | 776 |
if (_next[i] != INVALID) { |
777 |
_prev.set(_next[i], INVALID); |
|
777 |
_prev[_next[i]] = INVALID; |
|
778 | 778 |
_first[l] = _next[i]; |
779 | 779 |
} else { |
780 | 780 |
_first[l] = INVALID; |
781 | 781 |
_last[l] = INVALID; |
782 | 782 |
} |
783 |
_level.set(i, ++l); |
|
783 |
_level[i] = ++l; |
|
784 | 784 |
if (_first[l] == INVALID) { |
785 | 785 |
_first[l] = _last[l] = i; |
786 |
_prev.set(i, INVALID); |
|
787 |
_next.set(i, INVALID); |
|
786 |
_prev[i] = INVALID; |
|
787 |
_next[i] = INVALID; |
|
788 | 788 |
} else { |
789 |
_prev.set(_first[l], i); |
|
790 |
_next.set(i, _first[l]); |
|
789 |
_prev[_first[l]] = i; |
|
790 |
_next[i] = _first[l]; |
|
791 | 791 |
_first[l] = i; |
792 | 792 |
} |
793 | 793 |
if (_highest_active < l) { |
794 | 794 |
_highest_active = l; |
795 | 795 |
} |
796 | 796 |
} |
797 | 797 |
|
798 | 798 |
///Lift the active item returned by \c activeOn(l) to the given level. |
799 | 799 |
|
800 | 800 |
///Lift the active item returned by \ref activeOn() "activeOn(l)" |
801 | 801 |
///to the given level. |
802 | 802 |
void liftActiveOn(int l, int new_level) |
803 | 803 |
{ |
804 | 804 |
Item i = _first[l]; |
805 | 805 |
if (_next[i] != INVALID) { |
806 |
_prev.set(_next[i], INVALID); |
|
806 |
_prev[_next[i]] = INVALID; |
|
807 | 807 |
_first[l] = _next[i]; |
808 | 808 |
} else { |
809 | 809 |
_first[l] = INVALID; |
810 | 810 |
_last[l] = INVALID; |
811 | 811 |
} |
812 |
_level.set(i, l = new_level); |
|
812 |
_level[i] = l = new_level; |
|
813 | 813 |
if (_first[l] == INVALID) { |
814 | 814 |
_first[l] = _last[l] = i; |
815 |
_prev.set(i, INVALID); |
|
816 |
_next.set(i, INVALID); |
|
815 |
_prev[i] = INVALID; |
|
816 |
_next[i] = INVALID; |
|
817 | 817 |
} else { |
818 |
_prev.set(_first[l], i); |
|
819 |
_next.set(i, _first[l]); |
|
818 |
_prev[_first[l]] = i; |
|
819 |
_next[i] = _first[l]; |
|
820 | 820 |
_first[l] = i; |
821 | 821 |
} |
822 | 822 |
if (_highest_active < l) { |
823 | 823 |
_highest_active = l; |
824 | 824 |
} |
825 | 825 |
} |
826 | 826 |
|
827 | 827 |
///Lift the active item returned by \c activeOn(l) to the top level. |
828 | 828 |
|
829 | 829 |
///Lift the active item returned by \ref activeOn() "activeOn(l)" |
830 | 830 |
///to the top level and deactivate it. |
831 | 831 |
void liftActiveToTop(int l) |
832 | 832 |
{ |
833 | 833 |
Item i = _first[l]; |
834 | 834 |
if (_next[i] != INVALID) { |
835 |
_prev.set(_next[i], INVALID); |
|
835 |
_prev[_next[i]] = INVALID; |
|
836 | 836 |
_first[l] = _next[i]; |
837 | 837 |
} else { |
838 | 838 |
_first[l] = INVALID; |
839 | 839 |
_last[l] = INVALID; |
840 | 840 |
} |
841 |
_level.set(i, _max_level); |
|
841 |
_level[i] = _max_level; |
|
842 | 842 |
if (l == _highest_active) { |
843 | 843 |
while (_highest_active >= 0 && activeFree(_highest_active)) |
844 | 844 |
--_highest_active; |
845 | 845 |
} |
846 | 846 |
} |
847 | 847 |
|
848 | 848 |
///@} |
849 | 849 |
|
850 | 850 |
/// \brief Lift an active item to a higher level. |
851 | 851 |
/// |
852 | 852 |
/// Lift an active item to a higher level. |
853 | 853 |
/// \param i The item to be lifted. It must be active. |
854 | 854 |
/// \param new_level The new level of \c i. It must be strictly higher |
855 | 855 |
/// than the current level. |
856 | 856 |
/// |
857 | 857 |
void lift(Item i, int new_level) { |
858 | 858 |
if (_next[i] != INVALID) { |
859 |
_prev.set(_next[i], _prev[i]); |
|
859 |
_prev[_next[i]] = _prev[i]; |
|
860 | 860 |
} else { |
861 | 861 |
_last[new_level] = _prev[i]; |
862 | 862 |
} |
863 | 863 |
if (_prev[i] != INVALID) { |
864 |
_next.set(_prev[i], _next[i]); |
|
864 |
_next[_prev[i]] = _next[i]; |
|
865 | 865 |
} else { |
866 | 866 |
_first[new_level] = _next[i]; |
867 | 867 |
} |
868 |
_level.set(i, new_level); |
|
868 |
_level[i] = new_level; |
|
869 | 869 |
if (_first[new_level] == INVALID) { |
870 | 870 |
_first[new_level] = _last[new_level] = i; |
871 |
_prev.set(i, INVALID); |
|
872 |
_next.set(i, INVALID); |
|
871 |
_prev[i] = INVALID; |
|
872 |
_next[i] = INVALID; |
|
873 | 873 |
} else { |
874 |
_prev.set(_first[new_level], i); |
|
875 |
_next.set(i, _first[new_level]); |
|
874 |
_prev[_first[new_level]] = i; |
|
875 |
_next[i] = _first[new_level]; |
|
876 | 876 |
_first[new_level] = i; |
877 | 877 |
} |
878 | 878 |
if (_highest_active < new_level) { |
879 | 879 |
_highest_active = new_level; |
880 | 880 |
} |
881 | 881 |
} |
882 | 882 |
|
883 | 883 |
///Move an inactive item to the top but one level (in a dirty way). |
884 | 884 |
|
885 | 885 |
///This function moves an inactive item from the top level to the top |
886 | 886 |
///but one level (in a dirty way). |
887 | 887 |
///\warning It makes the underlying data structure corrupt, so use it |
888 | 888 |
///only if you really know what it is for. |
889 | 889 |
///\pre The item is on the top level. |
890 | 890 |
void dirtyTopButOne(Item i) { |
891 |
_level.set(i, _max_level - 1); |
|
891 |
_level[i] = _max_level - 1; |
|
892 | 892 |
} |
893 | 893 |
|
894 | 894 |
///Lift all items on and above the given level to the top level. |
895 | 895 |
|
896 | 896 |
///This function lifts all items on and above level \c l to the top |
897 | 897 |
///level and deactivates them. |
898 | 898 |
void liftToTop(int l) { |
899 | 899 |
for (int i = l + 1; _first[i] != INVALID; ++i) { |
900 | 900 |
Item n = _first[i]; |
901 | 901 |
while (n != INVALID) { |
902 |
_level.set(n, _max_level); |
|
902 |
_level[n] = _max_level; |
|
903 | 903 |
n = _next[n]; |
904 | 904 |
} |
905 | 905 |
_first[i] = INVALID; |
906 | 906 |
_last[i] = INVALID; |
907 | 907 |
} |
908 | 908 |
if (_highest_active > l - 1) { |
909 | 909 |
_highest_active = l - 1; |
910 | 910 |
while (_highest_active >= 0 && activeFree(_highest_active)) |
911 | 911 |
--_highest_active; |
912 | 912 |
} |
913 | 913 |
} |
914 | 914 |
|
915 | 915 |
private: |
916 | 916 |
|
917 | 917 |
int _init_level; |
918 | 918 |
|
919 | 919 |
public: |
920 | 920 |
|
921 | 921 |
///\name Initialization |
922 | 922 |
///Using these functions you can initialize the levels of the items. |
923 | 923 |
///\n |
924 | 924 |
///The initialization must be started by calling \c initStart(). |
925 | 925 |
///Then the items should be listed level by level starting with the |
926 | 926 |
///lowest one (level 0) using \c initAddItem() and \c initNewLevel(). |
927 | 927 |
///Finally \c initFinish() must be called. |
928 | 928 |
///The items not listed are put on the highest level. |
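/// |
///For illustration, a minimal sketch of the intended calling sequence |
///(\c elev denotes an elevator object of this class and \c s one of its |
///items; both names are placeholders): |
///\code |
///  elev.initStart(); |
///  elev.initAddItem(s);      // put the item s on level 0 |
///  elev.initNewLevel();      // items added from now on go to level 1 |
///  // ... list the remaining items with initAddItem(), opening further |
///  // levels with initNewLevel() as needed ... |
///  elev.initFinish();        // any item never listed stays on the top level |
///\endcode |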
929 | 929 |
///@{ |
930 | 930 |
|
931 | 931 |
///Start the initialization process. |
932 | 932 |
void initStart() { |
933 | 933 |
|
934 | 934 |
for (int i = 0; i <= _max_level; ++i) { |
935 | 935 |
_first[i] = _last[i] = INVALID; |
936 | 936 |
} |
937 | 937 |
_init_level = 0; |
938 | 938 |
for(typename ItemSetTraits<GR,Item>::ItemIt i(_graph); |
939 | 939 |
i != INVALID; ++i) { |
940 |
_level.set(i, _max_level); |
|
941 |
_active.set(i, false); |
|
940 |
_level[i] = _max_level; |
|
941 |
_active[i] = false; |
|
942 | 942 |
} |
943 | 943 |
} |
944 | 944 |
|
945 | 945 |
///Add an item to the current level. |
946 | 946 |
void initAddItem(Item i) { |
947 |
_level.set(i, _init_level); |
|
947 |
_level[i] = _init_level; |
|
948 | 948 |
if (_last[_init_level] == INVALID) { |
949 | 949 |
_first[_init_level] = i; |
950 | 950 |
_last[_init_level] = i; |
951 |
_prev.set(i, INVALID); |
|
952 |
_next.set(i, INVALID); |
|
951 |
_prev[i] = INVALID; |
|
952 |
_next[i] = INVALID; |
|
953 | 953 |
} else { |
954 |
_prev.set(i, _last[_init_level]); |
|
955 |
_next.set(i, INVALID); |
|
956 |
_next.set(_last[_init_level], i); |
|
954 |
_prev[i] = _last[_init_level]; |
|
955 |
_next[i] = INVALID; |
|
956 |
_next[_last[_init_level]] = i; |
|
957 | 957 |
_last[_init_level] = i; |
958 | 958 |
} |
959 | 959 |
} |
960 | 960 |
|
961 | 961 |
///Start a new level. |
962 | 962 |
|
963 | 963 |
///Start a new level. |
964 | 964 |
///It shouldn't be used before the items on level 0 are listed. |
965 | 965 |
void initNewLevel() { |
966 | 966 |
++_init_level; |
967 | 967 |
} |
968 | 968 |
|
969 | 969 |
///Finalize the initialization process. |
970 | 970 |
void initFinish() { |
971 | 971 |
_highest_active = -1; |
972 | 972 |
} |
973 | 973 |
|
974 | 974 |
///@} |
975 | 975 |
|
976 | 976 |
}; |
977 | 977 |
|
978 | 978 |
|
979 | 979 |
} //END OF NAMESPACE LEMON |
980 | 980 |
|
981 | 981 |
#endif |
982 | 982 |
1 | 1 |
/* -*- C++ -*- |
2 | 2 |
* |
3 | 3 |
* This file is a part of LEMON, a generic C++ optimization library |
4 | 4 |
* |
5 | 5 |
* Copyright (C) 2003-2008 |
6 | 6 |
* Egervary Jeno Kombinatorikus Optimalizalasi Kutatocsoport |
7 | 7 |
* (Egervary Research Group on Combinatorial Optimization, EGRES). |
8 | 8 |
* |
9 | 9 |
* Permission to use, modify and distribute this software is granted |
10 | 10 |
* provided that this copyright notice appears in all copies. For |
11 | 11 |
* precise terms see the accompanying LICENSE file. |
12 | 12 |
* |
13 | 13 |
* This software is provided "AS IS" with no warranty of any kind, |
14 | 14 |
* express or implied, and with no claim as to its suitability for any |
15 | 15 |
* purpose. |
16 | 16 |
* |
17 | 17 |
*/ |
18 | 18 |
|
19 | 19 |
#ifndef LEMON_GOMORY_HU_TREE_H |
20 | 20 |
#define LEMON_GOMORY_HU_TREE_H |
21 | 21 |
|
22 | 22 |
#include <limits> |
23 | 23 |
|
24 | 24 |
#include <lemon/core.h> |
25 | 25 |
#include <lemon/preflow.h> |
26 | 26 |
#include <lemon/concept_check.h> |
27 | 27 |
#include <lemon/concepts/maps.h> |
28 | 28 |
|
29 | 29 |
/// \ingroup min_cut |
30 | 30 |
/// \file |
31 | 31 |
/// \brief Gomory-Hu cut tree in graphs. |
32 | 32 |
|
33 | 33 |
namespace lemon { |
34 | 34 |
|
35 | 35 |
/// \ingroup min_cut |
36 | 36 |
/// |
37 | 37 |
/// \brief Gomory-Hu cut tree algorithm |
38 | 38 |
/// |
39 | 39 |
/// The Gomory-Hu tree is a tree on the node set of a given graph, but it |
40 | 40 |
/// may contain edges which are not in the original graph. It has the |
41 | 41 |
/// property that the minimum capacity edge of the path between two nodes |
42 | 42 |
/// in this tree has the same weight as the minimum cut in the graph |
43 | 43 |
/// between these nodes. Moreover the components obtained by removing |
44 | 44 |
/// this edge from the tree determine the corresponding minimum cut. |
45 | 45 |
/// |
46 | 46 |
/// Therefore once this tree is computed, the minimum cut between any pair |
47 | 47 |
/// of nodes can easily be obtained. |
48 | 48 |
/// |
49 | 49 |
/// The algorithm calculates \e n-1 distinct minimum cuts (currently with |
50 | 50 |
/// the \ref Preflow algorithm), therefore the algorithm has |
51 | 51 |
/// \f$O(n^3\sqrt{e})\f$ overall time complexity. It calculates a |
52 | 52 |
/// rooted Gomory-Hu tree, its structure and the weights can be obtained |
53 | 53 |
/// by \c predNode(), \c predValue() and \c rootDist(). |
54 | 54 |
/// |
55 | 55 |
/// The members \c minCutMap() and \c minCutValue() calculate |
56 | 56 |
/// the minimum cut and the minimum cut value between any two nodes |
57 | 57 |
/// in the graph. You can also list (iterate on) the nodes and the |
58 | 58 |
/// edges of the cuts using \c MinCutNodeIt and \c MinCutEdgeIt. |
59 | 59 |
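/// |
/// A minimal usage sketch (here \c g, \c capacities, \c s and \c t stand |
/// for an already built graph, an edge capacity map and two of its nodes): |
/// \code |
///   GomoryHu<Graph> gh(g, capacities); |
///   gh.run(); |
///   GomoryHu<Graph>::Value v = gh.minCutValue(s, t); |
/// \endcode |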
/// |
60 | 60 |
/// \tparam GR The type of the undirected graph the algorithm runs on. |
61 | 61 |
/// \tparam CAP The type of the edge map describing the edge capacities. |
62 | 62 |
/// It is \ref concepts::Graph::EdgeMap "GR::EdgeMap<int>" by default. |
63 | 63 |
#ifdef DOXYGEN |
64 | 64 |
template <typename GR, |
65 | 65 |
typename CAP> |
66 | 66 |
#else |
67 | 67 |
template <typename GR, |
68 | 68 |
typename CAP = typename GR::template EdgeMap<int> > |
69 | 69 |
#endif |
70 | 70 |
class GomoryHu { |
71 | 71 |
public: |
72 | 72 |
|
73 | 73 |
/// The graph type |
74 | 74 |
typedef GR Graph; |
75 | 75 |
/// The type of the edge capacity map |
76 | 76 |
typedef CAP Capacity; |
77 | 77 |
/// The value type of capacities |
78 | 78 |
typedef typename Capacity::Value Value; |
79 | 79 |
|
80 | 80 |
private: |
81 | 81 |
|
82 | 82 |
TEMPLATE_GRAPH_TYPEDEFS(Graph); |
83 | 83 |
|
84 | 84 |
const Graph& _graph; |
85 | 85 |
const Capacity& _capacity; |
86 | 86 |
|
87 | 87 |
Node _root; |
88 | 88 |
typename Graph::template NodeMap<Node>* _pred; |
89 | 89 |
typename Graph::template NodeMap<Value>* _weight; |
90 | 90 |
typename Graph::template NodeMap<int>* _order; |
91 | 91 |
|
92 | 92 |
void createStructures() { |
93 | 93 |
if (!_pred) { |
94 | 94 |
_pred = new typename Graph::template NodeMap<Node>(_graph); |
95 | 95 |
} |
96 | 96 |
if (!_weight) { |
97 | 97 |
_weight = new typename Graph::template NodeMap<Value>(_graph); |
98 | 98 |
} |
99 | 99 |
if (!_order) { |
100 | 100 |
_order = new typename Graph::template NodeMap<int>(_graph); |
101 | 101 |
} |
102 | 102 |
} |
103 | 103 |
|
104 | 104 |
void destroyStructures() { |
105 | 105 |
if (_pred) { |
106 | 106 |
delete _pred; |
107 | 107 |
} |
108 | 108 |
if (_weight) { |
109 | 109 |
delete _weight; |
110 | 110 |
} |
111 | 111 |
if (_order) { |
112 | 112 |
delete _order; |
113 | 113 |
} |
114 | 114 |
} |
115 | 115 |
|
116 | 116 |
public: |
117 | 117 |
|
118 | 118 |
/// \brief Constructor |
119 | 119 |
/// |
120 | 120 |
/// Constructor |
121 | 121 |
/// \param graph The undirected graph the algorithm runs on. |
122 | 122 |
/// \param capacity The edge capacity map. |
123 | 123 |
GomoryHu(const Graph& graph, const Capacity& capacity) |
124 | 124 |
: _graph(graph), _capacity(capacity), |
125 | 125 |
_pred(0), _weight(0), _order(0) |
126 | 126 |
{ |
127 | 127 |
checkConcept<concepts::ReadMap<Edge, Value>, Capacity>(); |
128 | 128 |
} |
129 | 129 |
|
130 | 130 |
|
131 | 131 |
/// \brief Destructor |
132 | 132 |
/// |
133 | 133 |
/// Destructor |
134 | 134 |
~GomoryHu() { |
135 | 135 |
destroyStructures(); |
136 | 136 |
} |
137 | 137 |
|
138 | 138 |
private: |
139 | 139 |
|
140 | 140 |
// Initialize the internal data structures |
141 | 141 |
void init() { |
142 | 142 |
createStructures(); |
143 | 143 |
|
144 | 144 |
_root = NodeIt(_graph); |
145 | 145 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
146 |
_pred->set(n, _root); |
|
147 |
_order->set(n, -1); |
|
146 |
(*_pred)[n] = _root; |
|
147 |
(*_order)[n] = -1; |
|
148 | 148 |
} |
149 |
_pred->set(_root, INVALID); |
|
150 |
_weight->set(_root, std::numeric_limits<Value>::max()); |
|
149 |
(*_pred)[_root] = INVALID; |
|
150 |
(*_weight)[_root] = std::numeric_limits<Value>::max(); |
|
151 | 151 |
} |
152 | 152 |
|
153 | 153 |
|
154 | 154 |
// Start the algorithm |
155 | 155 |
void start() { |
156 | 156 |
Preflow<Graph, Capacity> fa(_graph, _capacity, _root, INVALID); |
157 | 157 |
|
158 | 158 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
159 | 159 |
if (n == _root) continue; |
160 | 160 |
|
161 | 161 |
Node pn = (*_pred)[n]; |
162 | 162 |
fa.source(n); |
163 | 163 |
fa.target(pn); |
164 | 164 |
|
165 | 165 |
fa.runMinCut(); |
166 | 166 |
|
167 |
_weight->set(n, fa.flowValue()); |
|
167 |
(*_weight)[n] = fa.flowValue(); |
|
168 | 168 |
|
169 | 169 |
for (NodeIt nn(_graph); nn != INVALID; ++nn) { |
170 | 170 |
if (nn != n && fa.minCut(nn) && (*_pred)[nn] == pn) { |
171 |
_pred->set(nn, n); |
|
171 |
(*_pred)[nn] = n; |
|
172 | 172 |
} |
173 | 173 |
} |
174 | 174 |
if ((*_pred)[pn] != INVALID && fa.minCut((*_pred)[pn])) { |
175 |
_pred->set(n, (*_pred)[pn]); |
|
176 |
_pred->set(pn, n); |
|
177 |
_weight->set(n, (*_weight)[pn]); |
|
178 |
_weight->set(pn, fa.flowValue()); |
|
175 |
(*_pred)[n] = (*_pred)[pn]; |
|
176 |
(*_pred)[pn] = n; |
|
177 |
(*_weight)[n] = (*_weight)[pn]; |
|
178 |
(*_weight)[pn] = fa.flowValue(); |
|
179 | 179 |
} |
180 | 180 |
} |
181 | 181 |
|
182 |
_order->set(_root, 0); |
|
182 |
(*_order)[_root] = 0; |
|
183 | 183 |
int index = 1; |
184 | 184 |
|
185 | 185 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
186 | 186 |
std::vector<Node> st; |
187 | 187 |
Node nn = n; |
188 | 188 |
while ((*_order)[nn] == -1) { |
189 | 189 |
st.push_back(nn); |
190 | 190 |
nn = (*_pred)[nn]; |
191 | 191 |
} |
192 | 192 |
while (!st.empty()) { |
193 |
_order->set(st.back(), index++); |
|
193 |
(*_order)[st.back()] = index++; |
|
194 | 194 |
st.pop_back(); |
195 | 195 |
} |
196 | 196 |
} |
197 | 197 |
} |
198 | 198 |
|
199 | 199 |
public: |
200 | 200 |
|
201 | 201 |
///\name Execution Control |
202 | 202 |
|
203 | 203 |
///@{ |
204 | 204 |
|
205 | 205 |
/// \brief Run the Gomory-Hu algorithm. |
206 | 206 |
/// |
207 | 207 |
/// This function runs the Gomory-Hu algorithm. |
208 | 208 |
void run() { |
209 | 209 |
init(); |
210 | 210 |
start(); |
211 | 211 |
} |
212 | 212 |
|
213 | 213 |
/// @} |
214 | 214 |
|
215 | 215 |
///\name Query Functions |
216 | 216 |
///The results of the algorithm can be obtained using these |
217 | 217 |
///functions.\n |
218 | 218 |
///\ref run() "run()" should be called before using them.\n |
219 | 219 |
///See also \ref MinCutNodeIt and \ref MinCutEdgeIt. |
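/// |
///For example, the edges of the rooted Gomory-Hu tree and their weights |
///can be listed as follows (\c g is the graph and \c gh is a GomoryHu |
///instance on which \c run() has already been called): |
///\code |
///  for (Graph::NodeIt n(g); n != INVALID; ++n) |
///    if (gh.predNode(n) != INVALID) |
///      std::cout << g.id(gh.predNode(n)) << "--" << g.id(n) |
///                << ": " << gh.predValue(n) << std::endl; |
///\endcode |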
220 | 220 |
|
221 | 221 |
///@{ |
222 | 222 |
|
223 | 223 |
/// \brief Return the predecessor node in the Gomory-Hu tree. |
224 | 224 |
/// |
225 | 225 |
/// This function returns the predecessor node in the Gomory-Hu tree. |
226 | 226 |
/// If the node is |
227 | 227 |
/// the root of the Gomory-Hu tree, then it returns \c INVALID. |
228 | 228 |
Node predNode(const Node& node) { |
229 | 229 |
return (*_pred)[node]; |
230 | 230 |
} |
231 | 231 |
|
232 | 232 |
/// \brief Return the distance from the root node in the Gomory-Hu tree. |
233 | 233 |
/// |
234 | 234 |
/// This function returns the distance of \c node from the root node |
235 | 235 |
/// in the Gomory-Hu tree. |
236 | 236 |
int rootDist(const Node& node) { |
237 | 237 |
return (*_order)[node]; |
238 | 238 |
} |
239 | 239 |
|
240 | 240 |
/// \brief Return the weight of the predecessor edge in the |
241 | 241 |
/// Gomory-Hu tree. |
242 | 242 |
/// |
243 | 243 |
/// This function returns the weight of the predecessor edge in the |
244 | 244 |
/// Gomory-Hu tree. If the node is the root, the result is undefined. |
245 | 245 |
Value predValue(const Node& node) { |
246 | 246 |
return (*_weight)[node]; |
247 | 247 |
} |
248 | 248 |
|
249 | 249 |
/// \brief Return the minimum cut value between two nodes |
250 | 250 |
/// |
251 | 251 |
/// This function returns the minimum cut value between two nodes. The |
252 | 252 |
/// algorithm finds the nearest common ancestor in the Gomory-Hu |
253 | 253 |
/// tree and calculates the minimum weight edge on the paths to |
254 | 254 |
/// the ancestor. |
255 | 255 |
Value minCutValue(const Node& s, const Node& t) const { |
256 | 256 |
Node sn = s, tn = t; |
257 | 257 |
Value value = std::numeric_limits<Value>::max(); |
258 | 258 |
|
259 | 259 |
while (sn != tn) { |
260 | 260 |
if ((*_order)[sn] < (*_order)[tn]) { |
261 | 261 |
if ((*_weight)[tn] <= value) value = (*_weight)[tn]; |
262 | 262 |
tn = (*_pred)[tn]; |
263 | 263 |
} else { |
264 | 264 |
if ((*_weight)[sn] <= value) value = (*_weight)[sn]; |
265 | 265 |
sn = (*_pred)[sn]; |
266 | 266 |
} |
267 | 267 |
} |
268 | 268 |
return value; |
269 | 269 |
} |
270 | 270 |
|
271 | 271 |
/// \brief Return the minimum cut between two nodes |
272 | 272 |
/// |
273 | 273 |
/// This function returns the minimum cut between the nodes \c s and \c t |
274 | 274 |
/// in the \c cutMap parameter by setting the nodes in the component of |
275 | 275 |
/// \c s to \c true and the other nodes to \c false. |
276 | 276 |
/// |
277 | 277 |
/// For higher level interfaces, see MinCutNodeIt and MinCutEdgeIt. |
278 | 278 |
template <typename CutMap> |
279 | 279 |
Value minCutMap(const Node& s, ///< The base node. |
280 | 280 |
const Node& t, |
281 | 281 |
///< The node you want to separate from node \c s. |
282 | 282 |
CutMap& cutMap |
283 | 283 |
///< The cut will be returned in this map. |
284 | 284 |
/// It must be a \c bool (or convertible) |
285 | 285 |
/// \ref concepts::ReadWriteMap "ReadWriteMap" |
286 | 286 |
/// on the graph nodes. |
287 | 287 |
) const { |
288 | 288 |
Node sn = s, tn = t; |
289 | 289 |
bool s_root=false; |
290 | 290 |
Node rn = INVALID; |
291 | 291 |
Value value = std::numeric_limits<Value>::max(); |
292 | 292 |
|
293 | 293 |
while (sn != tn) { |
294 | 294 |
if ((*_order)[sn] < (*_order)[tn]) { |
295 | 295 |
if ((*_weight)[tn] <= value) { |
296 | 296 |
rn = tn; |
297 | 297 |
s_root = false; |
298 | 298 |
value = (*_weight)[tn]; |
299 | 299 |
} |
300 | 300 |
tn = (*_pred)[tn]; |
301 | 301 |
} else { |
302 | 302 |
if ((*_weight)[sn] <= value) { |
303 | 303 |
rn = sn; |
304 | 304 |
s_root = true; |
305 | 305 |
value = (*_weight)[sn]; |
306 | 306 |
} |
307 | 307 |
sn = (*_pred)[sn]; |
308 | 308 |
} |
309 | 309 |
} |
310 | 310 |
|
311 | 311 |
typename Graph::template NodeMap<bool> reached(_graph, false); |
312 |
reached.set(_root, true); |
|
312 |
reached[_root] = true; |
|
313 | 313 |
cutMap.set(_root, !s_root); |
314 |
reached.set(rn, true); |
|
314 |
reached[rn] = true; |
|
315 | 315 |
cutMap.set(rn, s_root); |
316 | 316 |
|
317 | 317 |
std::vector<Node> st; |
318 | 318 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
319 | 319 |
st.clear(); |
320 | 320 |
Node nn = n; |
321 | 321 |
while (!reached[nn]) { |
322 | 322 |
st.push_back(nn); |
323 | 323 |
nn = (*_pred)[nn]; |
324 | 324 |
} |
325 | 325 |
while (!st.empty()) { |
326 | 326 |
cutMap.set(st.back(), cutMap[nn]); |
327 | 327 |
st.pop_back(); |
328 | 328 |
} |
329 | 329 |
} |
330 | 330 |
|
331 | 331 |
return value; |
332 | 332 |
} |
333 | 333 |
|
334 | 334 |
///@} |
335 | 335 |
|
336 | 336 |
friend class MinCutNodeIt; |
337 | 337 |
|
338 | 338 |
/// Iterate on the nodes of a minimum cut |
339 | 339 |
|
340 | 340 |
/// This iterator class lists the nodes of a minimum cut found by |
341 | 341 |
/// GomoryHu. Before using it, you must allocate a GomoryHu class, |
342 | 342 |
/// and call its \ref GomoryHu::run() "run()" method. |
343 | 343 |
/// |
344 | 344 |
/// This example counts the nodes in the minimum cut separating \c s from |
345 | 345 |
/// \c t. |
346 | 346 |
/// \code |
347 | 347 |
/// GomoryHu<Graph> gom(g, capacities); |
348 | 348 |
/// gom.run(); |
349 | 349 |
/// int cnt=0; |
350 | 350 |
/// for(GomoryHu<Graph>::MinCutNodeIt n(gom,s,t); n!=INVALID; ++n) ++cnt; |
351 | 351 |
/// \endcode |
352 | 352 |
class MinCutNodeIt |
353 | 353 |
{ |
354 | 354 |
bool _side; |
355 | 355 |
typename Graph::NodeIt _node_it; |
356 | 356 |
typename Graph::template NodeMap<bool> _cut; |
357 | 357 |
public: |
358 | 358 |
/// Constructor |
359 | 359 |
|
360 | 360 |
/// Constructor. |
361 | 361 |
/// |
362 | 362 |
MinCutNodeIt(GomoryHu const &gomory, |
363 | 363 |
///< The GomoryHu class. You must call its |
364 | 364 |
/// run() method |
365 | 365 |
/// before initializing this iterator. |
366 | 366 |
const Node& s, ///< The base node. |
367 | 367 |
const Node& t, |
368 | 368 |
///< The node you want to separate from node \c s. |
369 | 369 |
bool side=true |
370 | 370 |
///< If it is \c true (default) then the iterator lists |
371 | 371 |
/// the nodes of the component containing \c s, |
372 | 372 |
/// otherwise it lists the other component. |
373 | 373 |
/// \note As the minimum cut is not always unique, |
374 | 374 |
/// \code |
375 | 375 |
/// MinCutNodeIt(gomory, s, t, true); |
376 | 376 |
/// \endcode |
377 | 377 |
/// and |
378 | 378 |
/// \code |
379 | 379 |
/// MinCutNodeIt(gomory, t, s, false); |
380 | 380 |
/// \endcode |
381 | 381 |
/// does not necessarily give the same set of nodes. |
382 | 382 |
/// However it is ensured that |
383 | 383 |
/// \code |
384 | 384 |
/// MinCutNodeIt(gomory, s, t, true); |
385 | 385 |
/// \endcode |
386 | 386 |
/// and |
387 | 387 |
/// \code |
388 | 388 |
/// MinCutNodeIt(gomory, s, t, false); |
389 | 389 |
/// \endcode |
390 | 390 |
/// together list each node exactly once. |
391 | 391 |
) |
392 | 392 |
: _side(side), _cut(gomory._graph) |
393 | 393 |
{ |
394 | 394 |
gomory.minCutMap(s,t,_cut); |
395 | 395 |
for(_node_it=typename Graph::NodeIt(gomory._graph); |
396 | 396 |
_node_it!=INVALID && _cut[_node_it]!=_side; |
397 | 397 |
++_node_it) {} |
398 | 398 |
} |
399 | 399 |
/// Conversion to \c Node |
400 | 400 |
|
401 | 401 |
/// Conversion to \c Node. |
402 | 402 |
/// |
403 | 403 |
operator typename Graph::Node() const |
404 | 404 |
{ |
405 | 405 |
return _node_it; |
406 | 406 |
} |
407 | 407 |
bool operator==(Invalid) { return _node_it==INVALID; } |
408 | 408 |
bool operator!=(Invalid) { return _node_it!=INVALID; } |
409 | 409 |
/// Next node |
410 | 410 |
|
411 | 411 |
/// Next node. |
412 | 412 |
/// |
413 | 413 |
MinCutNodeIt &operator++() |
414 | 414 |
{ |
415 | 415 |
for(++_node_it;_node_it!=INVALID&&_cut[_node_it]!=_side;++_node_it) {} |
416 | 416 |
return *this; |
417 | 417 |
} |
418 | 418 |
/// Postfix incrementation |
419 | 419 |
|
420 | 420 |
/// Postfix incrementation. |
421 | 421 |
/// |
422 | 422 |
/// \warning This incrementation |
423 | 423 |
/// returns a \c Node, not a \c MinCutNodeIt, as one may |
424 | 424 |
/// expect. |
425 | 425 |
typename Graph::Node operator++(int) |
426 | 426 |
{ |
427 | 427 |
typename Graph::Node n=*this; |
428 | 428 |
++(*this); |
429 | 429 |
return n; |
430 | 430 |
} |
431 | 431 |
}; |
432 | 432 |
|
433 | 433 |
friend class MinCutEdgeIt; |
434 | 434 |
|
435 | 435 |
/// Iterate on the edges of a minimum cut |
436 | 436 |
|
437 | 437 |
/// This iterator class lists the edges of a minimum cut found by |
438 | 438 |
/// GomoryHu. Before using it, you must allocate a GomoryHu class, |
439 | 439 |
/// and call its \ref GomoryHu::run() "run()" method. |
440 | 440 |
/// |
441 | 441 |
/// This example computes the value of the minimum cut separating \c s from |
442 | 442 |
/// \c t. |
443 | 443 |
/// \code |
444 | 444 |
/// GomoryHu<Graph> gom(g, capacities); |
445 | 445 |
/// gom.run(); |
446 | 446 |
/// int value=0; |
447 | 447 |
/// for(GomoryHu<Graph>::MinCutEdgeIt e(gom,s,t); e!=INVALID; ++e) |
448 | 448 |
/// value+=capacities[e]; |
449 | 449 |
/// \endcode |
450 | 450 |
/// The result will be the same as the value returned by |
451 | 451 |
/// \ref GomoryHu::minCutValue() "gom.minCutValue(s,t)". |
452 | 452 |
class MinCutEdgeIt |
453 | 453 |
{ |
454 | 454 |
bool _side; |
455 | 455 |
const Graph &_graph; |
456 | 456 |
typename Graph::NodeIt _node_it; |
457 | 457 |
typename Graph::OutArcIt _arc_it; |
458 | 458 |
typename Graph::template NodeMap<bool> _cut; |
459 | 459 |
void step() |
460 | 460 |
{ |
461 | 461 |
++_arc_it; |
462 | 462 |
while(_node_it!=INVALID && _arc_it==INVALID) |
463 | 463 |
{ |
464 | 464 |
for(++_node_it;_node_it!=INVALID&&!_cut[_node_it];++_node_it) {} |
465 | 465 |
if(_node_it!=INVALID) |
466 | 466 |
_arc_it=typename Graph::OutArcIt(_graph,_node_it); |
467 | 467 |
} |
468 | 468 |
} |
469 | 469 |
|
470 | 470 |
public: |
471 | 471 |
MinCutEdgeIt(GomoryHu const &gomory, |
472 | 472 |
///< The GomoryHu class. You must call its |
473 | 473 |
/// run() method |
474 | 474 |
/// before initializing this iterator. |
475 | 475 |
const Node& s, ///< The base node. |
476 | 476 |
const Node& t, |
477 | 477 |
///< The node you want to separate from node \c s. |
478 | 478 |
bool side=true |
479 | 479 |
///< If it is \c true (default) then the listed arcs |
480 | 480 |
/// will be oriented from |
481 | 481 |
/// the nodes of the component containing \c s, |
482 | 482 |
/// otherwise they will be oriented in the opposite |
483 | 483 |
/// direction. |
484 | 484 |
) |
485 | 485 |
: _graph(gomory._graph), _cut(_graph) |
486 | 486 |
{ |
487 | 487 |
gomory.minCutMap(s,t,_cut); |
488 | 488 |
if(!side) |
489 | 489 |
for(typename Graph::NodeIt n(_graph);n!=INVALID;++n) |
490 | 490 |
_cut[n]=!_cut[n]; |
491 | 491 |
|
492 | 492 |
for(_node_it=typename Graph::NodeIt(_graph); |
493 | 493 |
_node_it!=INVALID && !_cut[_node_it]; |
494 | 494 |
++_node_it) {} |
495 | 495 |
_arc_it = _node_it!=INVALID ? |
496 | 496 |
typename Graph::OutArcIt(_graph,_node_it) : INVALID; |
497 | 497 |
while(_node_it!=INVALID && _arc_it == INVALID) |
498 | 498 |
{ |
499 | 499 |
for(++_node_it; _node_it!=INVALID&&!_cut[_node_it]; ++_node_it) {} |
500 | 500 |
if(_node_it!=INVALID) |
501 | 501 |
_arc_it= typename Graph::OutArcIt(_graph,_node_it); |
502 | 502 |
} |
503 | 503 |
while(_arc_it!=INVALID && _cut[_graph.target(_arc_it)]) step(); |
504 | 504 |
} |
505 | 505 |
/// Conversion to \c Arc |
506 | 506 |
|
507 | 507 |
/// Conversion to \c Arc. |
508 | 508 |
/// |
509 | 509 |
operator typename Graph::Arc() const |
510 | 510 |
{ |
511 | 511 |
return _arc_it; |
512 | 512 |
} |
513 | 513 |
/// Conversion to \c Edge |
514 | 514 |
|
515 | 515 |
/// Conversion to \c Edge. |
516 | 516 |
/// |
517 | 517 |
operator typename Graph::Edge() const |
518 | 518 |
{ |
519 | 519 |
return _arc_it; |
520 | 520 |
} |
521 | 521 |
bool operator==(Invalid) { return _node_it==INVALID; } |
522 | 522 |
bool operator!=(Invalid) { return _node_it!=INVALID; } |
523 | 523 |
/// Next edge |
524 | 524 |
|
525 | 525 |
/// Next edge. |
526 | 526 |
/// |
527 | 527 |
MinCutEdgeIt &operator++() |
528 | 528 |
{ |
529 | 529 |
step(); |
530 | 530 |
while(_arc_it!=INVALID && _cut[_graph.target(_arc_it)]) step(); |
531 | 531 |
return *this; |
532 | 532 |
} |
533 | 533 |
/// Postfix incrementation |
534 | 534 |
|
535 | 535 |
/// Postfix incrementation. |
536 | 536 |
/// |
537 | 537 |
/// \warning This incrementation |
538 | 538 |
/// returns an \c Arc, not a \c MinCutEdgeIt, as one may expect. |
539 | 539 |
typename Graph::Arc operator++(int) |
540 | 540 |
{ |
541 | 541 |
typename Graph::Arc e=*this; |
542 | 542 |
++(*this); |
543 | 543 |
return e; |
544 | 544 |
} |
545 | 545 |
}; |
546 | 546 |
|
547 | 547 |
}; |
548 | 548 |
|
549 | 549 |
} |
550 | 550 |
|
551 | 551 |
#endif |
1 | 1 |
/* -*- mode: C++; indent-tabs-mode: nil; -*- |
2 | 2 |
* |
3 | 3 |
* This file is a part of LEMON, a generic C++ optimization library. |
4 | 4 |
* |
5 | 5 |
* Copyright (C) 2003-2009 |
6 | 6 |
* Egervary Jeno Kombinatorikus Optimalizalasi Kutatocsoport |
7 | 7 |
* (Egervary Research Group on Combinatorial Optimization, EGRES). |
8 | 8 |
* |
9 | 9 |
* Permission to use, modify and distribute this software is granted |
10 | 10 |
* provided that this copyright notice appears in all copies. For |
11 | 11 |
* precise terms see the accompanying LICENSE file. |
12 | 12 |
* |
13 | 13 |
* This software is provided "AS IS" with no warranty of any kind, |
14 | 14 |
* express or implied, and with no claim as to its suitability for any |
15 | 15 |
* purpose. |
16 | 16 |
* |
17 | 17 |
*/ |
18 | 18 |
|
19 | 19 |
#ifndef LEMON_HAO_ORLIN_H |
20 | 20 |
#define LEMON_HAO_ORLIN_H |
21 | 21 |
|
22 | 22 |
#include <vector> |
23 | 23 |
#include <list> |
24 | 24 |
#include <limits> |
25 | 25 |
|
26 | 26 |
#include <lemon/maps.h> |
27 | 27 |
#include <lemon/core.h> |
28 | 28 |
#include <lemon/tolerance.h> |
29 | 29 |
|
30 | 30 |
/// \file |
31 | 31 |
/// \ingroup min_cut |
32 | 32 |
/// \brief Implementation of the Hao-Orlin algorithm. |
33 | 33 |
/// |
34 | 34 |
/// Implementation of the Hao-Orlin algorithm class for testing network |
35 | 35 |
/// reliability. |
36 | 36 |
|
37 | 37 |
namespace lemon { |
38 | 38 |
|
39 | 39 |
/// \ingroup min_cut |
40 | 40 |
/// |
41 | 41 |
/// \brief %Hao-Orlin algorithm to find a minimum cut in directed graphs. |
42 | 42 |
/// |
43 | 43 |
/// Hao-Orlin calculates a minimum cut in a directed graph |
44 | 44 |
/// \f$D=(V,A)\f$. It takes a fixed node \f$ source \in V \f$ and |
45 | 45 |
/// consists of two phases: in the first phase it determines a |
46 | 46 |
/// minimum cut with \f$ source \f$ on the source-side (i.e. a set |
47 | 47 |
/// \f$ X\subsetneq V \f$ with \f$ source \in X \f$ and minimal |
48 | 48 |
/// out-degree) and in the second phase it determines a minimum cut |
49 | 49 |
/// with \f$ source \f$ on the sink-side (i.e. a set |
50 | 50 |
/// \f$ X\subsetneq V \f$ with \f$ source \notin X \f$ and minimal |
51 | 51 |
/// out-degree). Obviously, the smaller of these two cuts will be a |
52 | 52 |
/// minimum cut of \f$ D \f$. The algorithm is a modified |
53 | 53 |
/// push-relabel preflow algorithm and our implementation calculates |
54 | 54 |
/// the minimum cut in \f$ O(n^2\sqrt{m}) \f$ time (we use the |
55 | 55 |
/// highest-label rule), or in \f$O(nm)\f$ for unit capacities. The |
56 | 56 |
/// purpose of such an algorithm is testing network reliability. For an |
57 | 57 |
/// undirected graph you can run just the first phase of the |
58 | 58 |
/// algorithm or you can use the algorithm of Nagamochi and Ibaraki |
59 | 59 |
/// which solves the undirected problem in |
60 | 60 |
/// \f$ O(nm + n^2 \log n) \f$ time: it is implemented in the |
61 | 61 |
/// NagamochiIbaraki algorithm class. |
62 | 62 |
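/// |
/// A minimal usage sketch; \c g and \c cap denote an already built |
/// digraph and an arc capacity map, and \c run() and \c minCutValue() |
/// are assumed to be the usual execution and query members of this |
/// class: |
/// \code |
///   HaoOrlin<Digraph> ho(g, cap); |
///   ho.run();                       // both phases of the algorithm |
///   std::cout << ho.minCutValue() << std::endl; |
/// \endcode |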
/// |
63 | 63 |
/// \tparam GR The digraph class the algorithm runs on. |
64 | 64 |
/// \tparam CAP An arc map of capacities which can be any numeric type. |
65 | 65 |
/// The default type is \ref concepts::Digraph::ArcMap "GR::ArcMap<int>". |
66 | 66 |
/// \tparam TOL Tolerance class for handling inexact computations. The |
67 | 67 |
/// default tolerance type is \ref Tolerance "Tolerance<CAP::Value>". |
68 | 68 |
#ifdef DOXYGEN |
69 | 69 |
template <typename GR, typename CAP, typename TOL> |
70 | 70 |
#else |
71 | 71 |
template <typename GR, |
72 | 72 |
typename CAP = typename GR::template ArcMap<int>, |
73 | 73 |
typename TOL = Tolerance<typename CAP::Value> > |
74 | 74 |
#endif |
75 | 75 |
class HaoOrlin { |
76 | 76 |
private: |
77 | 77 |
|
78 | 78 |
typedef GR Digraph; |
79 | 79 |
typedef CAP CapacityMap; |
80 | 80 |
typedef TOL Tolerance; |
81 | 81 |
|
82 | 82 |
typedef typename CapacityMap::Value Value; |
83 | 83 |
|
84 | 84 |
TEMPLATE_GRAPH_TYPEDEFS(Digraph); |
85 | 85 |
|
86 | 86 |
const Digraph& _graph; |
87 | 87 |
const CapacityMap* _capacity; |
88 | 88 |
|
89 | 89 |
typedef typename Digraph::template ArcMap<Value> FlowMap; |
90 | 90 |
FlowMap* _flow; |
91 | 91 |
|
92 | 92 |
Node _source; |
93 | 93 |
|
94 | 94 |
int _node_num; |
95 | 95 |
|
96 | 96 |
// Bucketing structure |
97 | 97 |
std::vector<Node> _first, _last; |
98 | 98 |
typename Digraph::template NodeMap<Node>* _next; |
99 | 99 |
typename Digraph::template NodeMap<Node>* _prev; |
100 | 100 |
typename Digraph::template NodeMap<bool>* _active; |
101 | 101 |
typename Digraph::template NodeMap<int>* _bucket; |
102 | 102 |
|
103 | 103 |
std::vector<bool> _dormant; |
104 | 104 |
|
105 | 105 |
std::list<std::list<int> > _sets; |
106 | 106 |
std::list<int>::iterator _highest; |
107 | 107 |
|
108 | 108 |
typedef typename Digraph::template NodeMap<Value> ExcessMap; |
109 | 109 |
ExcessMap* _excess; |
110 | 110 |
|
111 | 111 |
typedef typename Digraph::template NodeMap<bool> SourceSetMap; |
112 | 112 |
SourceSetMap* _source_set; |
113 | 113 |
|
114 | 114 |
Value _min_cut; |
115 | 115 |
|
116 | 116 |
typedef typename Digraph::template NodeMap<bool> MinCutMap; |
117 | 117 |
MinCutMap* _min_cut_map; |
118 | 118 |
|
119 | 119 |
Tolerance _tolerance; |
120 | 120 |
|
121 | 121 |
public: |
122 | 122 |
|
123 | 123 |
/// \brief Constructor |
124 | 124 |
/// |
125 | 125 |
/// Constructor of the algorithm class. |
126 | 126 |
HaoOrlin(const Digraph& graph, const CapacityMap& capacity, |
127 | 127 |
const Tolerance& tolerance = Tolerance()) : |
128 | 128 |
_graph(graph), _capacity(&capacity), _flow(0), _source(), |
129 | 129 |
_node_num(), _first(), _last(), _next(0), _prev(0), |
130 | 130 |
_active(0), _bucket(0), _dormant(), _sets(), _highest(), |
131 | 131 |
_excess(0), _source_set(0), _min_cut(), _min_cut_map(0), |
132 | 132 |
_tolerance(tolerance) {} |
133 | 133 |
|
134 | 134 |
~HaoOrlin() { |
135 | 135 |
if (_min_cut_map) { |
136 | 136 |
delete _min_cut_map; |
137 | 137 |
} |
138 | 138 |
if (_source_set) { |
139 | 139 |
delete _source_set; |
140 | 140 |
} |
141 | 141 |
if (_excess) { |
142 | 142 |
delete _excess; |
143 | 143 |
} |
144 | 144 |
if (_next) { |
145 | 145 |
delete _next; |
146 | 146 |
} |
147 | 147 |
if (_prev) { |
148 | 148 |
delete _prev; |
149 | 149 |
} |
150 | 150 |
if (_active) { |
151 | 151 |
delete _active; |
152 | 152 |
} |
153 | 153 |
if (_bucket) { |
154 | 154 |
delete _bucket; |
155 | 155 |
} |
156 | 156 |
if (_flow) { |
157 | 157 |
delete _flow; |
158 | 158 |
} |
159 | 159 |
} |
160 | 160 |
|
161 | 161 |
private: |
162 | 162 |
|
163 | 163 |
void activate(const Node& i) { |
164 |
_active->set(i, true); |
|
164 |
(*_active)[i] = true; |
|
165 | 165 |
|
166 | 166 |
int bucket = (*_bucket)[i]; |
167 | 167 |
|
168 | 168 |
if ((*_prev)[i] == INVALID || (*_active)[(*_prev)[i]]) return; |
169 | 169 |
//unlace |
170 |
_next->set((*_prev)[i], (*_next)[i]); |
|
170 |
(*_next)[(*_prev)[i]] = (*_next)[i]; |
|
171 | 171 |
if ((*_next)[i] != INVALID) { |
172 |
_prev->set((*_next)[i], (*_prev)[i]); |
|
172 |
(*_prev)[(*_next)[i]] = (*_prev)[i]; |
|
173 | 173 |
} else { |
174 | 174 |
_last[bucket] = (*_prev)[i]; |
175 | 175 |
} |
176 | 176 |
//lace |
177 |
_next->set(i, _first[bucket]); |
|
178 |
_prev->set(_first[bucket], i); |
|
179 |
_prev->set(i, INVALID); |
|
177 |
(*_next)[i] = _first[bucket]; |
|
178 |
(*_prev)[_first[bucket]] = i; |
|
179 |
(*_prev)[i] = INVALID; |
|
180 | 180 |
_first[bucket] = i; |
181 | 181 |
} |
182 | 182 |
|
183 | 183 |
void deactivate(const Node& i) { |
184 |
_active->set(i, false); |
|
184 |
(*_active)[i] = false; |
|
185 | 185 |
int bucket = (*_bucket)[i]; |
186 | 186 |
|
187 | 187 |
if ((*_next)[i] == INVALID || !(*_active)[(*_next)[i]]) return; |
188 | 188 |
|
189 | 189 |
//unlace |
190 |
_prev->set((*_next)[i], (*_prev)[i]); |
|
190 |
(*_prev)[(*_next)[i]] = (*_prev)[i]; |
|
191 | 191 |
if ((*_prev)[i] != INVALID) { |
192 |
_next->set((*_prev)[i], (*_next)[i]); |
|
192 |
(*_next)[(*_prev)[i]] = (*_next)[i]; |
|
193 | 193 |
} else { |
194 | 194 |
_first[bucket] = (*_next)[i]; |
195 | 195 |
} |
196 | 196 |
//lace |
197 |
_prev->set(i, _last[bucket]); |
|
198 |
_next->set(_last[bucket], i); |
|
199 |
_next->set(i, INVALID); |
|
197 |
(*_prev)[i] = _last[bucket]; |
|
198 |
(*_next)[_last[bucket]] = i; |
|
199 |
(*_next)[i] = INVALID; |
|
200 | 200 |
_last[bucket] = i; |
201 | 201 |
} |
202 | 202 |
|
203 | 203 |
void addItem(const Node& i, int bucket) { |
204 | 204 |
(*_bucket)[i] = bucket; |
205 | 205 |
if (_last[bucket] != INVALID) { |
206 |
_prev->set(i, _last[bucket]); |
|
207 |
_next->set(_last[bucket], i); |
|
208 |
_next->set(i, INVALID); |
|
206 |
(*_prev)[i] = _last[bucket]; |
|
207 |
(*_next)[_last[bucket]] = i; |
|
208 |
(*_next)[i] = INVALID; |
|
209 | 209 |
_last[bucket] = i; |
210 | 210 |
} else { |
211 |
_prev->set(i, INVALID); |
|
211 |
(*_prev)[i] = INVALID; |
|
212 | 212 |
_first[bucket] = i; |
213 |
_next->set(i, INVALID); |
|
213 |
(*_next)[i] = INVALID; |
|
214 | 214 |
_last[bucket] = i; |
215 | 215 |
} |
216 | 216 |
} |
217 | 217 |
|
218 | 218 |
void findMinCutOut() { |
219 | 219 |
|
220 | 220 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
221 |
_excess->set(n, 0); |
|
221 |
(*_excess)[n] = 0; |
|
222 | 222 |
} |
223 | 223 |
|
224 | 224 |
for (ArcIt a(_graph); a != INVALID; ++a) { |
225 |
_flow->set(a, 0); |
|
225 |
(*_flow)[a] = 0; |
|
226 | 226 |
} |
227 | 227 |
|
228 | 228 |
int bucket_num = 0; |
229 | 229 |
std::vector<Node> queue(_node_num); |
230 | 230 |
int qfirst = 0, qlast = 0, qsep = 0; |
231 | 231 |
|
232 | 232 |
{ |
233 | 233 |
typename Digraph::template NodeMap<bool> reached(_graph, false); |
234 | 234 |
|
235 |
reached.set(_source, true); |
|
235 |
reached[_source] = true; |
|
236 | 236 |
bool first_set = true; |
237 | 237 |
|
238 | 238 |
for (NodeIt t(_graph); t != INVALID; ++t) { |
239 | 239 |
if (reached[t]) continue; |
240 | 240 |
_sets.push_front(std::list<int>()); |
241 | 241 |
|
242 | 242 |
queue[qlast++] = t; |
243 |
reached.set(t, true); |
|
243 |
reached[t] = true; |
|
244 | 244 |
|
245 | 245 |
while (qfirst != qlast) { |
246 | 246 |
if (qsep == qfirst) { |
247 | 247 |
++bucket_num; |
248 | 248 |
_sets.front().push_front(bucket_num); |
249 | 249 |
_dormant[bucket_num] = !first_set; |
250 | 250 |
_first[bucket_num] = _last[bucket_num] = INVALID; |
251 | 251 |
qsep = qlast; |
252 | 252 |
} |
253 | 253 |
|
254 | 254 |
Node n = queue[qfirst++]; |
255 | 255 |
addItem(n, bucket_num); |
256 | 256 |
|
257 | 257 |
for (InArcIt a(_graph, n); a != INVALID; ++a) { |
258 | 258 |
Node u = _graph.source(a); |
259 | 259 |
if (!reached[u] && _tolerance.positive((*_capacity)[a])) { |
260 |
reached.set(u, true); |
|
260 |
reached[u] = true; |
|
261 | 261 |
queue[qlast++] = u; |
262 | 262 |
} |
263 | 263 |
} |
264 | 264 |
} |
265 | 265 |
first_set = false; |
266 | 266 |
} |
267 | 267 |
|
268 | 268 |
++bucket_num; |
269 |
_bucket->set(_source, 0); |
|
269 |
(*_bucket)[_source] = 0; |
|
270 | 270 |
_dormant[0] = true; |
271 | 271 |
} |
272 |
_source_set->set(_source, true); |
|
272 |
(*_source_set)[_source] = true; |
|
273 | 273 |
|
274 | 274 |
Node target = _last[_sets.back().back()]; |
275 | 275 |
{ |
276 | 276 |
for (OutArcIt a(_graph, _source); a != INVALID; ++a) { |
277 | 277 |
if (_tolerance.positive((*_capacity)[a])) { |
278 | 278 |
Node u = _graph.target(a); |
279 |
_flow->set(a, (*_capacity)[a]); |
|
280 |
_excess->set(u, (*_excess)[u] + (*_capacity)[a]); |
|
279 |
(*_flow)[a] = (*_capacity)[a]; |
|
280 |
(*_excess)[u] += (*_capacity)[a]; |
|
281 | 281 |
if (!(*_active)[u] && u != _source) { |
282 | 282 |
activate(u); |
283 | 283 |
} |
284 | 284 |
} |
285 | 285 |
} |
286 | 286 |
|
287 | 287 |
if ((*_active)[target]) { |
288 | 288 |
deactivate(target); |
289 | 289 |
} |
290 | 290 |
|
291 | 291 |
_highest = _sets.back().begin(); |
292 | 292 |
while (_highest != _sets.back().end() && |
293 | 293 |
!(*_active)[_first[*_highest]]) { |
294 | 294 |
++_highest; |
295 | 295 |
} |
296 | 296 |
} |
297 | 297 |
|
298 | 298 |
while (true) { |
299 | 299 |
while (_highest != _sets.back().end()) { |
300 | 300 |
Node n = _first[*_highest]; |
301 | 301 |
Value excess = (*_excess)[n]; |
302 | 302 |
int next_bucket = _node_num; |
303 | 303 |
|
304 | 304 |
int under_bucket; |
305 | 305 |
if (++std::list<int>::iterator(_highest) == _sets.back().end()) { |
306 | 306 |
under_bucket = -1; |
307 | 307 |
} else { |
308 | 308 |
under_bucket = *(++std::list<int>::iterator(_highest)); |
309 | 309 |
} |
310 | 310 |
|
311 | 311 |
for (OutArcIt a(_graph, n); a != INVALID; ++a) { |
312 | 312 |
Node v = _graph.target(a); |
313 | 313 |
if (_dormant[(*_bucket)[v]]) continue; |
314 | 314 |
Value rem = (*_capacity)[a] - (*_flow)[a]; |
315 | 315 |
if (!_tolerance.positive(rem)) continue; |
316 | 316 |
if ((*_bucket)[v] == under_bucket) { |
317 | 317 |
if (!(*_active)[v] && v != target) { |
318 | 318 |
activate(v); |
319 | 319 |
} |
320 | 320 |
if (!_tolerance.less(rem, excess)) { |
321 |
_flow->set(a, (*_flow)[a] + excess); |
|
322 |
_excess->set(v, (*_excess)[v] + excess); |
|
321 |
(*_flow)[a] += excess; |
|
322 |
(*_excess)[v] += excess; |
|
323 | 323 |
excess = 0; |
324 | 324 |
goto no_more_push; |
325 | 325 |
} else { |
326 | 326 |
excess -= rem; |
327 |
_excess->set(v, (*_excess)[v] + rem); |
|
328 |
_flow->set(a, (*_capacity)[a]); |
|
327 |
(*_excess)[v] += rem; |
|
328 |
(*_flow)[a] = (*_capacity)[a]; |
|
329 | 329 |
} |
330 | 330 |
} else if (next_bucket > (*_bucket)[v]) { |
331 | 331 |
next_bucket = (*_bucket)[v]; |
332 | 332 |
} |
333 | 333 |
} |
334 | 334 |
|
335 | 335 |
for (InArcIt a(_graph, n); a != INVALID; ++a) { |
336 | 336 |
Node v = _graph.source(a); |
337 | 337 |
if (_dormant[(*_bucket)[v]]) continue; |
338 | 338 |
Value rem = (*_flow)[a]; |
339 | 339 |
if (!_tolerance.positive(rem)) continue; |
340 | 340 |
if ((*_bucket)[v] == under_bucket) { |
341 | 341 |
if (!(*_active)[v] && v != target) { |
342 | 342 |
activate(v); |
343 | 343 |
} |
344 | 344 |
if (!_tolerance.less(rem, excess)) { |
345 |
_flow->set(a, (*_flow)[a] - excess); |
|
346 |
_excess->set(v, (*_excess)[v] + excess); |
|
345 |
(*_flow)[a] -= excess; |
|
346 |
(*_excess)[v] += excess; |
|
347 | 347 |
excess = 0; |
348 | 348 |
goto no_more_push; |
349 | 349 |
} else { |
350 | 350 |
excess -= rem; |
351 |
_excess->set(v, (*_excess)[v] + rem); |
|
352 |
_flow->set(a, 0); |
|
351 |
(*_excess)[v] += rem; |
|
352 |
(*_flow)[a] = 0; |
|
353 | 353 |
} |
354 | 354 |
} else if (next_bucket > (*_bucket)[v]) { |
355 | 355 |
next_bucket = (*_bucket)[v]; |
356 | 356 |
} |
357 | 357 |
} |
358 | 358 |
|
359 | 359 |
no_more_push: |
360 | 360 |
|
361 |
_excess->set(n, excess); |
|
361 |
(*_excess)[n] = excess; |
|
362 | 362 |
|
363 | 363 |
if (excess != 0) { |
364 | 364 |
if ((*_next)[n] == INVALID) { |
365 | 365 |
typename std::list<std::list<int> >::iterator new_set = |
366 | 366 |
_sets.insert(--_sets.end(), std::list<int>()); |
367 | 367 |
new_set->splice(new_set->end(), _sets.back(), |
368 | 368 |
_sets.back().begin(), ++_highest); |
369 | 369 |
for (std::list<int>::iterator it = new_set->begin(); |
370 | 370 |
it != new_set->end(); ++it) { |
371 | 371 |
_dormant[*it] = true; |
372 | 372 |
} |
373 | 373 |
while (_highest != _sets.back().end() && |
374 | 374 |
!(*_active)[_first[*_highest]]) { |
375 | 375 |
++_highest; |
376 | 376 |
} |
377 | 377 |
} else if (next_bucket == _node_num) { |
378 | 378 |
_first[(*_bucket)[n]] = (*_next)[n]; |
379 |
_prev->set((*_next)[n], INVALID); |
|
379 |
(*_prev)[(*_next)[n]] = INVALID; |
|
380 | 380 |
|
381 | 381 |
std::list<std::list<int> >::iterator new_set = |
382 | 382 |
_sets.insert(--_sets.end(), std::list<int>()); |
383 | 383 |
|
384 | 384 |
new_set->push_front(bucket_num); |
385 |
_bucket->set(n, bucket_num); |
|
385 |
(*_bucket)[n] = bucket_num; |
|
386 | 386 |
_first[bucket_num] = _last[bucket_num] = n; |
387 |
_next->set(n, INVALID); |
|
388 |
_prev->set(n, INVALID); |
|
387 |
(*_next)[n] = INVALID; |
|
388 |
(*_prev)[n] = INVALID; |
|
389 | 389 |
_dormant[bucket_num] = true; |
390 | 390 |
++bucket_num; |
391 | 391 |
|
392 | 392 |
while (_highest != _sets.back().end() && |
393 | 393 |
!(*_active)[_first[*_highest]]) { |
394 | 394 |
++_highest; |
395 | 395 |
} |
396 | 396 |
} else { |
397 | 397 |
_first[*_highest] = (*_next)[n]; |
398 |
_prev->set((*_next)[n], INVALID); |
|
398 |
(*_prev)[(*_next)[n]] = INVALID; |
|
399 | 399 |
|
400 | 400 |
while (next_bucket != *_highest) { |
401 | 401 |
--_highest; |
402 | 402 |
} |
403 | 403 |
|
404 | 404 |
if (_highest == _sets.back().begin()) { |
405 | 405 |
_sets.back().push_front(bucket_num); |
406 | 406 |
_dormant[bucket_num] = false; |
407 | 407 |
_first[bucket_num] = _last[bucket_num] = INVALID; |
408 | 408 |
++bucket_num; |
409 | 409 |
} |
410 | 410 |
--_highest; |
411 | 411 |
|
412 |
_bucket->set(n, *_highest); |
|
413 |
_next->set(n, _first[*_highest]); |
|
412 |
(*_bucket)[n] = *_highest; |
|
413 |
(*_next)[n] = _first[*_highest]; |
|
414 | 414 |
if (_first[*_highest] != INVALID) { |
415 |
_prev->set(_first[*_highest], n); |
|
415 |
(*_prev)[_first[*_highest]] = n; |
|
416 | 416 |
} else { |
417 | 417 |
_last[*_highest] = n; |
418 | 418 |
} |
419 | 419 |
_first[*_highest] = n; |
420 | 420 |
} |
421 | 421 |
} else { |
422 | 422 |
|
423 | 423 |
deactivate(n); |
424 | 424 |
if (!(*_active)[_first[*_highest]]) { |
425 | 425 |
++_highest; |
426 | 426 |
if (_highest != _sets.back().end() && |
427 | 427 |
!(*_active)[_first[*_highest]]) { |
428 | 428 |
_highest = _sets.back().end(); |
429 | 429 |
} |
430 | 430 |
} |
431 | 431 |
} |
432 | 432 |
} |
433 | 433 |
|
434 | 434 |
if ((*_excess)[target] < _min_cut) { |
435 | 435 |
_min_cut = (*_excess)[target]; |
436 | 436 |
for (NodeIt i(_graph); i != INVALID; ++i) { |
437 |
_min_cut_map->set(i, true); |
|
437 |
(*_min_cut_map)[i] = true; |
|
438 | 438 |
} |
439 | 439 |
for (std::list<int>::iterator it = _sets.back().begin(); |
440 | 440 |
it != _sets.back().end(); ++it) { |
441 | 441 |
Node n = _first[*it]; |
442 | 442 |
while (n != INVALID) { |
443 |
_min_cut_map->set(n, false); |
|
443 |
(*_min_cut_map)[n] = false; |
|
444 | 444 |
n = (*_next)[n]; |
445 | 445 |
} |
446 | 446 |
} |
447 | 447 |
} |
448 | 448 |
|
449 | 449 |
{ |
450 | 450 |
Node new_target; |
451 | 451 |
if ((*_prev)[target] != INVALID || (*_next)[target] != INVALID) { |
452 | 452 |
if ((*_next)[target] == INVALID) { |
453 | 453 |
_last[(*_bucket)[target]] = (*_prev)[target]; |
454 | 454 |
new_target = (*_prev)[target]; |
455 | 455 |
} else { |
456 |
_prev->set((*_next)[target], (*_prev)[target]); |
|
456 |
(*_prev)[(*_next)[target]] = (*_prev)[target]; |
|
457 | 457 |
new_target = (*_next)[target]; |
458 | 458 |
} |
459 | 459 |
if ((*_prev)[target] == INVALID) { |
460 | 460 |
_first[(*_bucket)[target]] = (*_next)[target]; |
461 | 461 |
} else { |
462 |
_next->set((*_prev)[target], (*_next)[target]); |
|
462 |
(*_next)[(*_prev)[target]] = (*_next)[target]; |
|
463 | 463 |
} |
464 | 464 |
} else { |
465 | 465 |
_sets.back().pop_back(); |
466 | 466 |
if (_sets.back().empty()) { |
467 | 467 |
_sets.pop_back(); |
468 | 468 |
if (_sets.empty()) |
469 | 469 |
break; |
470 | 470 |
for (std::list<int>::iterator it = _sets.back().begin(); |
471 | 471 |
it != _sets.back().end(); ++it) { |
472 | 472 |
_dormant[*it] = false; |
473 | 473 |
} |
474 | 474 |
} |
475 | 475 |
new_target = _last[_sets.back().back()]; |
476 | 476 |
} |
477 | 477 |
|
478 |
_bucket->set(target, 0); |
|
478 |
(*_bucket)[target] = 0; |
|
479 | 479 |
|
480 |
_source_set->set(target, true); |
|
480 |
(*_source_set)[target] = true; |
|
481 | 481 |
for (OutArcIt a(_graph, target); a != INVALID; ++a) { |
482 | 482 |
Value rem = (*_capacity)[a] - (*_flow)[a]; |
483 | 483 |
if (!_tolerance.positive(rem)) continue; |
484 | 484 |
Node v = _graph.target(a); |
485 | 485 |
if (!(*_active)[v] && !(*_source_set)[v]) { |
486 | 486 |
activate(v); |
487 | 487 |
} |
488 |
_excess->set(v, (*_excess)[v] + rem); |
|
489 |
_flow->set(a, (*_capacity)[a]); |
|
488 |
(*_excess)[v] += rem; |
|
489 |
(*_flow)[a] = (*_capacity)[a]; |
|
490 | 490 |
} |
491 | 491 |
|
492 | 492 |
for (InArcIt a(_graph, target); a != INVALID; ++a) { |
493 | 493 |
Value rem = (*_flow)[a]; |
494 | 494 |
if (!_tolerance.positive(rem)) continue; |
495 | 495 |
Node v = _graph.source(a); |
496 | 496 |
if (!(*_active)[v] && !(*_source_set)[v]) { |
497 | 497 |
activate(v); |
498 | 498 |
} |
499 |
_excess->set(v, (*_excess)[v] + rem); |
|
500 |
_flow->set(a, 0); |
|
499 |
(*_excess)[v] += rem; |
|
500 |
(*_flow)[a] = 0; |
|
501 | 501 |
} |
502 | 502 |
|
503 | 503 |
target = new_target; |
504 | 504 |
if ((*_active)[target]) { |
505 | 505 |
deactivate(target); |
506 | 506 |
} |
507 | 507 |
|
508 | 508 |
_highest = _sets.back().begin(); |
509 | 509 |
while (_highest != _sets.back().end() && |
510 | 510 |
!(*_active)[_first[*_highest]]) { |
511 | 511 |
++_highest; |
512 | 512 |
} |
513 | 513 |
} |
514 | 514 |
} |
515 | 515 |
} |
516 | 516 |
|
517 | 517 |
void findMinCutIn() { |
518 | 518 |
|
519 | 519 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
520 |
_excess->set(n, 0); |
|
520 |
(*_excess)[n] = 0; |
|
521 | 521 |
} |
522 | 522 |
|
523 | 523 |
for (ArcIt a(_graph); a != INVALID; ++a) { |
524 |
_flow->set(a, 0); |
|
524 |
(*_flow)[a] = 0; |
|
525 | 525 |
} |
526 | 526 |
|
527 | 527 |
int bucket_num = 0; |
528 | 528 |
std::vector<Node> queue(_node_num); |
529 | 529 |
int qfirst = 0, qlast = 0, qsep = 0; |
530 | 530 |
|
531 | 531 |
{ |
532 | 532 |
typename Digraph::template NodeMap<bool> reached(_graph, false); |
533 | 533 |
|
534 |
reached.set(_source, true); |
|
534 |
reached[_source] = true; |
|
535 | 535 |
|
536 | 536 |
bool first_set = true; |
537 | 537 |
|
538 | 538 |
for (NodeIt t(_graph); t != INVALID; ++t) { |
539 | 539 |
if (reached[t]) continue; |
540 | 540 |
_sets.push_front(std::list<int>()); |
541 | 541 |
|
542 | 542 |
queue[qlast++] = t; |
543 |
reached.set(t, true); |
|
543 |
reached[t] = true; |
|
544 | 544 |
|
545 | 545 |
while (qfirst != qlast) { |
546 | 546 |
if (qsep == qfirst) { |
547 | 547 |
++bucket_num; |
548 | 548 |
_sets.front().push_front(bucket_num); |
549 | 549 |
_dormant[bucket_num] = !first_set; |
550 | 550 |
_first[bucket_num] = _last[bucket_num] = INVALID; |
551 | 551 |
qsep = qlast; |
552 | 552 |
} |
553 | 553 |
|
554 | 554 |
Node n = queue[qfirst++]; |
555 | 555 |
addItem(n, bucket_num); |
556 | 556 |
|
557 | 557 |
for (OutArcIt a(_graph, n); a != INVALID; ++a) { |
558 | 558 |
Node u = _graph.target(a); |
559 | 559 |
if (!reached[u] && _tolerance.positive((*_capacity)[a])) { |
560 |
reached.set(u, true); |
|
560 |
reached[u] = true; |
|
561 | 561 |
queue[qlast++] = u; |
562 | 562 |
} |
563 | 563 |
} |
564 | 564 |
} |
565 | 565 |
first_set = false; |
566 | 566 |
} |
567 | 567 |
|
568 | 568 |
++bucket_num; |
569 |
_bucket->set(_source, 0); |
|
569 |
(*_bucket)[_source] = 0; |
|
570 | 570 |
_dormant[0] = true; |
571 | 571 |
} |
572 |
_source_set->set(_source, true); |
|
572 |
(*_source_set)[_source] = true; |
|
573 | 573 |
|
574 | 574 |
Node target = _last[_sets.back().back()]; |
575 | 575 |
{ |
576 | 576 |
for (InArcIt a(_graph, _source); a != INVALID; ++a) { |
577 | 577 |
if (_tolerance.positive((*_capacity)[a])) { |
578 | 578 |
Node u = _graph.source(a); |
579 |
_flow->set(a, (*_capacity)[a]); |
|
580 |
_excess->set(u, (*_excess)[u] + (*_capacity)[a]); |
|
579 |
(*_flow)[a] = (*_capacity)[a]; |
|
580 |
(*_excess)[u] += (*_capacity)[a]; |
|
581 | 581 |
if (!(*_active)[u] && u != _source) { |
582 | 582 |
activate(u); |
583 | 583 |
} |
584 | 584 |
} |
585 | 585 |
} |
586 | 586 |
if ((*_active)[target]) { |
587 | 587 |
deactivate(target); |
588 | 588 |
} |
589 | 589 |
|
590 | 590 |
_highest = _sets.back().begin(); |
591 | 591 |
while (_highest != _sets.back().end() && |
592 | 592 |
!(*_active)[_first[*_highest]]) { |
593 | 593 |
++_highest; |
594 | 594 |
} |
595 | 595 |
} |
596 | 596 |
|
597 | 597 |
|
598 | 598 |
while (true) { |
599 | 599 |
while (_highest != _sets.back().end()) { |
600 | 600 |
Node n = _first[*_highest]; |
601 | 601 |
Value excess = (*_excess)[n]; |
602 | 602 |
int next_bucket = _node_num; |
603 | 603 |
|
604 | 604 |
int under_bucket; |
605 | 605 |
if (++std::list<int>::iterator(_highest) == _sets.back().end()) { |
606 | 606 |
under_bucket = -1; |
607 | 607 |
} else { |
608 | 608 |
under_bucket = *(++std::list<int>::iterator(_highest)); |
609 | 609 |
} |
610 | 610 |
|
611 | 611 |
for (InArcIt a(_graph, n); a != INVALID; ++a) { |
612 | 612 |
Node v = _graph.source(a); |
613 | 613 |
if (_dormant[(*_bucket)[v]]) continue; |
614 | 614 |
Value rem = (*_capacity)[a] - (*_flow)[a]; |
615 | 615 |
if (!_tolerance.positive(rem)) continue; |
616 | 616 |
if ((*_bucket)[v] == under_bucket) { |
617 | 617 |
if (!(*_active)[v] && v != target) { |
618 | 618 |
activate(v); |
619 | 619 |
} |
620 | 620 |
if (!_tolerance.less(rem, excess)) { |
621 |
_flow->set(a, (*_flow)[a] + excess); |
|
622 |
_excess->set(v, (*_excess)[v] + excess); |
|
621 |
(*_flow)[a] += excess; |
|
622 |
(*_excess)[v] += excess; |
|
623 | 623 |
excess = 0; |
624 | 624 |
goto no_more_push; |
625 | 625 |
} else { |
626 | 626 |
excess -= rem; |
627 |
_excess->set(v, (*_excess)[v] + rem); |
|
628 |
_flow->set(a, (*_capacity)[a]); |
|
627 |
(*_excess)[v] += rem; |
|
628 |
(*_flow)[a] = (*_capacity)[a]; |
|
629 | 629 |
} |
630 | 630 |
} else if (next_bucket > (*_bucket)[v]) { |
631 | 631 |
next_bucket = (*_bucket)[v]; |
632 | 632 |
} |
633 | 633 |
} |
634 | 634 |
|
635 | 635 |
for (OutArcIt a(_graph, n); a != INVALID; ++a) { |
636 | 636 |
Node v = _graph.target(a); |
637 | 637 |
if (_dormant[(*_bucket)[v]]) continue; |
638 | 638 |
Value rem = (*_flow)[a]; |
639 | 639 |
if (!_tolerance.positive(rem)) continue; |
640 | 640 |
if ((*_bucket)[v] == under_bucket) { |
641 | 641 |
if (!(*_active)[v] && v != target) { |
642 | 642 |
activate(v); |
643 | 643 |
} |
644 | 644 |
if (!_tolerance.less(rem, excess)) { |
645 |
_flow->set(a, (*_flow)[a] - excess); |
|
646 |
_excess->set(v, (*_excess)[v] + excess); |
|
645 |
(*_flow)[a] -= excess; |
|
646 |
(*_excess)[v] += excess; |
|
647 | 647 |
excess = 0; |
648 | 648 |
goto no_more_push; |
649 | 649 |
} else { |
650 | 650 |
excess -= rem; |
651 |
_excess->set(v, (*_excess)[v] + rem); |
|
652 |
_flow->set(a, 0); |
|
651 |
(*_excess)[v] += rem; |
|
652 |
(*_flow)[a] = 0; |
|
653 | 653 |
} |
654 | 654 |
} else if (next_bucket > (*_bucket)[v]) { |
655 | 655 |
next_bucket = (*_bucket)[v]; |
656 | 656 |
} |
657 | 657 |
} |
658 | 658 |
|
659 | 659 |
no_more_push: |
660 | 660 |
|
661 |
_excess->set(n, excess); |
|
661 |
(*_excess)[n] = excess; |
|
662 | 662 |
|
663 | 663 |
if (excess != 0) { |
664 | 664 |
if ((*_next)[n] == INVALID) { |
665 | 665 |
typename std::list<std::list<int> >::iterator new_set = |
666 | 666 |
_sets.insert(--_sets.end(), std::list<int>()); |
667 | 667 |
new_set->splice(new_set->end(), _sets.back(), |
668 | 668 |
_sets.back().begin(), ++_highest); |
669 | 669 |
for (std::list<int>::iterator it = new_set->begin(); |
670 | 670 |
it != new_set->end(); ++it) { |
671 | 671 |
_dormant[*it] = true; |
672 | 672 |
} |
673 | 673 |
while (_highest != _sets.back().end() && |
674 | 674 |
!(*_active)[_first[*_highest]]) { |
675 | 675 |
++_highest; |
676 | 676 |
} |
677 | 677 |
} else if (next_bucket == _node_num) { |
678 | 678 |
_first[(*_bucket)[n]] = (*_next)[n]; |
679 |
_prev->set((*_next)[n], INVALID); |
|
679 |
(*_prev)[(*_next)[n]] = INVALID; |
|
680 | 680 |
|
681 | 681 |
std::list<std::list<int> >::iterator new_set = |
682 | 682 |
_sets.insert(--_sets.end(), std::list<int>()); |
683 | 683 |
|
684 | 684 |
new_set->push_front(bucket_num); |
685 |
_bucket->set(n, bucket_num); |
|
685 |
(*_bucket)[n] = bucket_num; |
|
686 | 686 |
_first[bucket_num] = _last[bucket_num] = n; |
687 |
_next->set(n, INVALID); |
|
688 |
_prev->set(n, INVALID); |
|
687 |
(*_next)[n] = INVALID; |
|
688 |
(*_prev)[n] = INVALID; |
|
689 | 689 |
_dormant[bucket_num] = true; |
690 | 690 |
++bucket_num; |
691 | 691 |
|
692 | 692 |
while (_highest != _sets.back().end() && |
693 | 693 |
!(*_active)[_first[*_highest]]) { |
694 | 694 |
++_highest; |
695 | 695 |
} |
696 | 696 |
} else { |
697 | 697 |
_first[*_highest] = (*_next)[n]; |
698 |
_prev->set((*_next)[n], INVALID); |
|
698 |
(*_prev)[(*_next)[n]] = INVALID; |
|
699 | 699 |
|
700 | 700 |
while (next_bucket != *_highest) { |
701 | 701 |
--_highest; |
702 | 702 |
} |
703 | 703 |
if (_highest == _sets.back().begin()) { |
704 | 704 |
_sets.back().push_front(bucket_num); |
705 | 705 |
_dormant[bucket_num] = false; |
706 | 706 |
_first[bucket_num] = _last[bucket_num] = INVALID; |
707 | 707 |
++bucket_num; |
708 | 708 |
} |
709 | 709 |
--_highest; |
710 | 710 |
|
711 |
_bucket->set(n, *_highest); |
|
712 |
_next->set(n, _first[*_highest]); |
|
711 |
(*_bucket)[n] = *_highest; |
|
712 |
(*_next)[n] = _first[*_highest]; |
|
713 | 713 |
if (_first[*_highest] != INVALID) { |
714 |
_prev->set(_first[*_highest], n); |
|
714 |
(*_prev)[_first[*_highest]] = n; |
|
715 | 715 |
} else { |
716 | 716 |
_last[*_highest] = n; |
717 | 717 |
} |
718 | 718 |
_first[*_highest] = n; |
719 | 719 |
} |
720 | 720 |
} else { |
721 | 721 |
|
722 | 722 |
deactivate(n); |
723 | 723 |
if (!(*_active)[_first[*_highest]]) { |
724 | 724 |
++_highest; |
725 | 725 |
if (_highest != _sets.back().end() && |
726 | 726 |
!(*_active)[_first[*_highest]]) { |
727 | 727 |
_highest = _sets.back().end(); |
728 | 728 |
} |
729 | 729 |
} |
730 | 730 |
} |
731 | 731 |
} |
732 | 732 |
|
733 | 733 |
if ((*_excess)[target] < _min_cut) { |
734 | 734 |
_min_cut = (*_excess)[target]; |
735 | 735 |
for (NodeIt i(_graph); i != INVALID; ++i) { |
736 |
_min_cut_map->set(i, false); |
|
736 |
(*_min_cut_map)[i] = false; |
|
737 | 737 |
} |
738 | 738 |
for (std::list<int>::iterator it = _sets.back().begin(); |
739 | 739 |
it != _sets.back().end(); ++it) { |
740 | 740 |
Node n = _first[*it]; |
741 | 741 |
while (n != INVALID) { |
742 |
_min_cut_map->set(n, true); |
|
742 |
(*_min_cut_map)[n] = true; |
|
743 | 743 |
n = (*_next)[n]; |
744 | 744 |
} |
745 | 745 |
} |
746 | 746 |
} |
747 | 747 |
|
748 | 748 |
{ |
749 | 749 |
Node new_target; |
750 | 750 |
if ((*_prev)[target] != INVALID || (*_next)[target] != INVALID) { |
751 | 751 |
if ((*_next)[target] == INVALID) { |
752 | 752 |
_last[(*_bucket)[target]] = (*_prev)[target]; |
753 | 753 |
new_target = (*_prev)[target]; |
754 | 754 |
} else { |
755 |
_prev->set((*_next)[target], (*_prev)[target]); |
|
755 |
(*_prev)[(*_next)[target]] = (*_prev)[target]; |
|
756 | 756 |
new_target = (*_next)[target]; |
757 | 757 |
} |
758 | 758 |
if ((*_prev)[target] == INVALID) { |
759 | 759 |
_first[(*_bucket)[target]] = (*_next)[target]; |
760 | 760 |
} else { |
761 |
_next->set((*_prev)[target], (*_next)[target]); |
|
761 |
(*_next)[(*_prev)[target]] = (*_next)[target]; |
|
762 | 762 |
} |
763 | 763 |
} else { |
764 | 764 |
_sets.back().pop_back(); |
765 | 765 |
if (_sets.back().empty()) { |
766 | 766 |
_sets.pop_back(); |
767 | 767 |
if (_sets.empty()) |
768 | 768 |
break; |
769 | 769 |
for (std::list<int>::iterator it = _sets.back().begin(); |
770 | 770 |
it != _sets.back().end(); ++it) { |
771 | 771 |
_dormant[*it] = false; |
772 | 772 |
} |
773 | 773 |
} |
774 | 774 |
new_target = _last[_sets.back().back()]; |
775 | 775 |
} |
776 | 776 |
|
777 |
_bucket->set(target, 0); |
|
777 |
(*_bucket)[target] = 0; |
|
778 | 778 |
|
779 |
_source_set->set(target, true); |
|
779 |
(*_source_set)[target] = true; |
|
780 | 780 |
for (InArcIt a(_graph, target); a != INVALID; ++a) { |
781 | 781 |
Value rem = (*_capacity)[a] - (*_flow)[a]; |
782 | 782 |
if (!_tolerance.positive(rem)) continue; |
783 | 783 |
Node v = _graph.source(a); |
784 | 784 |
if (!(*_active)[v] && !(*_source_set)[v]) { |
785 | 785 |
activate(v); |
786 | 786 |
} |
787 |
_excess->set(v, (*_excess)[v] + rem); |
|
788 |
_flow->set(a, (*_capacity)[a]); |
|
787 |
(*_excess)[v] += rem; |
|
788 |
(*_flow)[a] = (*_capacity)[a]; |
|
789 | 789 |
} |
790 | 790 |
|
791 | 791 |
for (OutArcIt a(_graph, target); a != INVALID; ++a) { |
792 | 792 |
Value rem = (*_flow)[a]; |
793 | 793 |
if (!_tolerance.positive(rem)) continue; |
794 | 794 |
Node v = _graph.target(a); |
795 | 795 |
if (!(*_active)[v] && !(*_source_set)[v]) { |
796 | 796 |
activate(v); |
797 | 797 |
} |
798 |
_excess->set(v, (*_excess)[v] + rem); |
|
799 |
_flow->set(a, 0); |
|
798 |
(*_excess)[v] += rem; |
|
799 |
(*_flow)[a] = 0; |
|
800 | 800 |
} |
801 | 801 |
|
802 | 802 |
target = new_target; |
803 | 803 |
if ((*_active)[target]) { |
804 | 804 |
deactivate(target); |
805 | 805 |
} |
806 | 806 |
|
807 | 807 |
_highest = _sets.back().begin(); |
808 | 808 |
while (_highest != _sets.back().end() && |
809 | 809 |
!(*_active)[_first[*_highest]]) { |
810 | 810 |
++_highest; |
811 | 811 |
} |
812 | 812 |
} |
813 | 813 |
} |
814 | 814 |
} |
815 | 815 |
|
816 | 816 |
public: |
817 | 817 |
|
818 | 818 |
/// \name Execution control |
819 | 819 |
/// The simplest way to execute the algorithm is to use |
820 | 820 |
/// one of the member functions called \ref run(). |
821 | 821 |
/// \n |
822 | 822 |
/// If you need more control over the execution, |
823 | 823 |
/// first you must call \ref init(), then the \ref calculateIn() or |
824 | 824 |
/// \ref calculateOut() functions. |
825 | 825 |
|
826 | 826 |
/// @{ |
827 | 827 |
|
828 | 828 |
/// \brief Initializes the internal data structures. |
829 | 829 |
/// |
830 | 830 |
/// Initializes the internal data structures. It creates |
831 | 831 |
/// the maps, residual graph adaptors and some bucket structures |
832 | 832 |
/// for the algorithm. |
833 | 833 |
void init() { |
834 | 834 |
init(NodeIt(_graph)); |
835 | 835 |
} |
836 | 836 |
|
837 | 837 |
/// \brief Initializes the internal data structures. |
838 | 838 |
/// |
839 | 839 |
/// Initializes the internal data structures. It creates |
840 | 840 |
/// the maps, residual graph adaptor and some bucket structures |
841 | 841 |
/// for the algorithm. Node \c source is used as the push-relabel |
842 | 842 |
/// algorithm's source. |
843 | 843 |
void init(const Node& source) { |
844 | 844 |
_source = source; |
845 | 845 |
|
846 | 846 |
_node_num = countNodes(_graph); |
847 | 847 |
|
848 | 848 |
_first.resize(_node_num); |
849 | 849 |
_last.resize(_node_num); |
850 | 850 |
|
851 | 851 |
_dormant.resize(_node_num); |
852 | 852 |
|
853 | 853 |
if (!_flow) { |
854 | 854 |
_flow = new FlowMap(_graph); |
855 | 855 |
} |
856 | 856 |
if (!_next) { |
857 | 857 |
_next = new typename Digraph::template NodeMap<Node>(_graph); |
858 | 858 |
} |
859 | 859 |
if (!_prev) { |
860 | 860 |
_prev = new typename Digraph::template NodeMap<Node>(_graph); |
861 | 861 |
} |
862 | 862 |
if (!_active) { |
863 | 863 |
_active = new typename Digraph::template NodeMap<bool>(_graph); |
864 | 864 |
} |
865 | 865 |
if (!_bucket) { |
866 | 866 |
_bucket = new typename Digraph::template NodeMap<int>(_graph); |
867 | 867 |
} |
868 | 868 |
if (!_excess) { |
869 | 869 |
_excess = new ExcessMap(_graph); |
870 | 870 |
} |
871 | 871 |
if (!_source_set) { |
872 | 872 |
_source_set = new SourceSetMap(_graph); |
873 | 873 |
} |
874 | 874 |
if (!_min_cut_map) { |
875 | 875 |
_min_cut_map = new MinCutMap(_graph); |
876 | 876 |
} |
877 | 877 |
|
878 | 878 |
_min_cut = std::numeric_limits<Value>::max(); |
879 | 879 |
} |
880 | 880 |
|
881 | 881 |
|
882 | 882 |
/// \brief Calculates a minimum cut with \f$ source \f$ on the |
883 | 883 |
/// source-side. |
884 | 884 |
/// |
885 | 885 |
/// Calculates a minimum cut with \f$ source \f$ on the |
886 | 886 |
/// source-side (i.e. a set \f$ X\subsetneq V \f$ with |
887 | 887 |
/// \f$ source \in X \f$ and minimal out-degree). |
888 | 888 |
void calculateOut() { |
889 | 889 |
findMinCutOut(); |
890 | 890 |
} |
891 | 891 |
|
892 | 892 |
/// \brief Calculates a minimum cut with \f$ source \f$ on the |
893 | 893 |
/// target-side. |
894 | 894 |
/// |
895 | 895 |
/// Calculates a minimum cut with \f$ source \f$ on the |
896 | 896 |
/// target-side (i.e. a set \f$ X\subsetneq V \f$ with |
897 | 897 |
/// \f$ source \notin X \f$ and minimal out-degree). |
898 | 898 |
void calculateIn() { |
899 | 899 |
findMinCutIn(); |
900 | 900 |
} |
901 | 901 |
|
902 | 902 |
|
903 | 903 |
/// \brief Runs the algorithm. |
904 | 904 |
/// |
905 | 905 |
/// Runs the algorithm. It finds nodes \c source and \c target |
906 | 906 |
/// arbitrarily and then calls \ref init(), \ref calculateOut() |
907 | 907 |
/// and \ref calculateIn(). |
908 | 908 |
void run() { |
909 | 909 |
init(); |
910 | 910 |
calculateOut(); |
911 | 911 |
calculateIn(); |
912 | 912 |
} |
913 | 913 |
|
914 | 914 |
/// \brief Runs the algorithm. |
915 | 915 |
/// |
916 | 916 |
/// Runs the algorithm. It uses the given \c source node, finds a |
917 | 917 |
/// proper \c target and then calls \ref init(), \ref |
918 | 918 |
/// calculateOut() and \ref calculateIn(). |
919 | 919 |
void run(const Node& s) { |
920 | 920 |
init(s); |
921 | 921 |
calculateOut(); |
922 | 922 |
calculateIn(); |
923 | 923 |
} |
924 | 924 |
|
925 | 925 |
/// @} |
926 | 926 |
|
927 | 927 |
/// \name Query Functions |
928 | 928 |
/// The result of the %HaoOrlin algorithm |
929 | 929 |
/// can be obtained using these functions. |
930 | 930 |
/// \n |
931 | 931 |
/// Before using these functions, either \ref run(), \ref |
932 | 932 |
/// calculateOut() or \ref calculateIn() must be called. |
933 | 933 |
|
934 | 934 |
/// @{ |
935 | 935 |
|
936 | 936 |
/// \brief Returns the value of the minimum value cut. |
937 | 937 |
/// |
938 | 938 |
/// Returns the value of the minimum value cut. |
939 | 939 |
Value minCutValue() const { |
940 | 940 |
return _min_cut; |
941 | 941 |
} |
942 | 942 |
|
943 | 943 |
|
944 | 944 |
/// \brief Returns a minimum cut. |
945 | 945 |
/// |
946 | 946 |
/// Sets \c nodeMap to the characteristic vector of a minimum |
947 | 947 |
/// value cut: it will give a nonempty set \f$ X\subsetneq V \f$ |
948 | 948 |
/// with minimal out-degree (i.e. \c nodeMap will be true exactly |
949 | 949 |
/// for the nodes of \f$ X \f$). \pre nodeMap should be a |
950 | 950 |
/// bool-valued node-map. |
951 | 951 |
template <typename NodeMap> |
952 | 952 |
Value minCutMap(NodeMap& nodeMap) const { |
953 | 953 |
for (NodeIt it(_graph); it != INVALID; ++it) { |
954 | 954 |
nodeMap.set(it, (*_min_cut_map)[it]); |
955 | 955 |
} |
956 | 956 |
return _min_cut; |
957 | 957 |
} |
958 | 958 |
|
959 | 959 |
/// @} |
960 | 960 |
|
961 | 961 |
}; //class HaoOrlin |
962 | 962 |
|
963 | 963 |
|
964 | 964 |
} //namespace lemon |
965 | 965 |
|
966 | 966 |
#endif //LEMON_HAO_ORLIN_H |
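[Editorial note: the following usage sketch is not part of the changeset above. It illustrates the execution-control and query interface of HaoOrlin documented in this file (run(), init(), calculateOut(), calculateIn(), minCutValue(), minCutMap()). The header path is inferred from the include guard LEMON_HAO_ORLIN_H, the (digraph, capacity map) constructor is assumed, and the graph, node and map names are illustrative only.]

  #include <lemon/list_graph.h>
  #include <lemon/hao_orlin.h>

  int main() {
    using namespace lemon;

    // Build a tiny digraph with integer arc capacities.
    ListDigraph g;
    ListDigraph::Node a = g.addNode();
    ListDigraph::Node b = g.addNode();
    ListDigraph::Node c = g.addNode();
    ListDigraph::ArcMap<int> cap(g);
    cap[g.addArc(a, b)] = 3;
    cap[g.addArc(b, c)] = 1;
    cap[g.addArc(c, a)] = 2;

    // run() performs init() + calculateOut() + calculateIn(); the
    // phases can also be invoked separately for finer control.
    HaoOrlin<ListDigraph, ListDigraph::ArcMap<int> > ho(g, cap);
    ho.init(a);          // use node a as the push-relabel source
    ho.calculateOut();   // minimum cut with the source on the source-side
    ho.calculateIn();    // minimum cut with the source on the target-side

    // Query the best cut found so far.
    ListDigraph::NodeMap<bool> cut(g);
    int value = ho.minCutMap(cut);   // also equals ho.minCutValue()
    return value == ho.minCutValue() ? 0 : 1;
  }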
1 | 1 |
/* -*- mode: C++; indent-tabs-mode: nil; -*- |
2 | 2 |
* |
3 | 3 |
* This file is a part of LEMON, a generic C++ optimization library. |
4 | 4 |
* |
5 | 5 |
* Copyright (C) 2003-2009 |
6 | 6 |
* Egervary Jeno Kombinatorikus Optimalizalasi Kutatocsoport |
7 | 7 |
* (Egervary Research Group on Combinatorial Optimization, EGRES). |
8 | 8 |
* |
9 | 9 |
* Permission to use, modify and distribute this software is granted |
10 | 10 |
* provided that this copyright notice appears in all copies. For |
11 | 11 |
* precise terms see the accompanying LICENSE file. |
12 | 12 |
* |
13 | 13 |
* This software is provided "AS IS" with no warranty of any kind, |
14 | 14 |
* express or implied, and with no claim as to its suitability for any |
15 | 15 |
* purpose. |
16 | 16 |
* |
17 | 17 |
*/ |
18 | 18 |
|
19 | 19 |
#ifndef LEMON_MAX_MATCHING_H |
20 | 20 |
#define LEMON_MAX_MATCHING_H |
21 | 21 |
|
22 | 22 |
#include <vector> |
23 | 23 |
#include <queue> |
24 | 24 |
#include <set> |
25 | 25 |
#include <limits> |
26 | 26 |
|
27 | 27 |
#include <lemon/core.h> |
28 | 28 |
#include <lemon/unionfind.h> |
29 | 29 |
#include <lemon/bin_heap.h> |
30 | 30 |
#include <lemon/maps.h> |
31 | 31 |
|
32 | 32 |
///\ingroup matching |
33 | 33 |
///\file |
34 | 34 |
///\brief Maximum matching algorithms in general graphs. |
35 | 35 |
|
36 | 36 |
namespace lemon { |
37 | 37 |
|
38 | 38 |
/// \ingroup matching |
39 | 39 |
/// |
40 | 40 |
/// \brief Edmonds' alternating forest maximum matching algorithm. |
41 | 41 |
/// |
42 | 42 |
/// This class implements Edmonds' alternating forest matching |
43 | 43 |
/// algorithm. The algorithm can be started from an arbitrary initial |
44 | 44 |
/// matching (the default is the empty one). |
45 | 45 |
/// |
46 | 46 |
/// The dual solution of the problem is a map of the nodes to |
47 | 47 |
/// MaxMatching::Status, having values \c EVEN/D, \c ODD/A and \c |
48 | 48 |
/// MATCHED/C showing the Gallai-Edmonds decomposition of the |
49 | 49 |
/// graph. The nodes in \c EVEN/D induce a graph with |
50 | 50 |
/// factor-critical components, the nodes in \c ODD/A form the |
51 | 51 |
/// barrier, and the nodes in \c MATCHED/C induce a graph having a |
52 | 52 |
/// perfect matching. The number of the factor-critical components |
53 | 53 |
/// minus the number of barrier nodes is a lower bound on the |
54 | 54 |
/// number of unmatched nodes, and the matching is optimal if and only if this bound is |
55 | 55 |
/// tight. This decomposition can be obtained by calling \c |
56 | 56 |
/// decomposition() after running the algorithm. |
57 | 57 |
/// |
58 | 58 |
/// \tparam GR The graph type the algorithm runs on. |
59 | 59 |
template <typename GR> |
60 | 60 |
class MaxMatching { |
61 | 61 |
public: |
62 | 62 |
|
63 | 63 |
typedef GR Graph; |
64 | 64 |
typedef typename Graph::template NodeMap<typename Graph::Arc> |
65 | 65 |
MatchingMap; |
66 | 66 |
|
67 | 67 |
///\brief Indicates the Gallai-Edmonds decomposition of the graph. |
68 | 68 |
/// |
69 | 69 |
///Indicates the Gallai-Edmonds decomposition of the graph. The |
70 | 70 |
///nodes with Status \c EVEN/D induce a graph with factor-critical |
71 | 71 |
///components, the nodes in \c ODD/A form the canonical barrier, |
72 | 72 |
///and the nodes in \c MATCHED/C induce a graph having a perfect |
73 | 73 |
///matching. |
74 | 74 |
enum Status { |
75 | 75 |
EVEN = 1, D = 1, MATCHED = 0, C = 0, ODD = -1, A = -1, UNMATCHED = -2 |
76 | 76 |
}; |
77 | 77 |
|
78 | 78 |
typedef typename Graph::template NodeMap<Status> StatusMap; |
79 | 79 |
|
80 | 80 |
private: |
81 | 81 |
|
82 | 82 |
TEMPLATE_GRAPH_TYPEDEFS(Graph); |
83 | 83 |
|
84 | 84 |
typedef UnionFindEnum<IntNodeMap> BlossomSet; |
85 | 85 |
typedef ExtendFindEnum<IntNodeMap> TreeSet; |
86 | 86 |
typedef RangeMap<Node> NodeIntMap; |
87 | 87 |
typedef MatchingMap EarMap; |
88 | 88 |
typedef std::vector<Node> NodeQueue; |
89 | 89 |
|
90 | 90 |
const Graph& _graph; |
91 | 91 |
MatchingMap* _matching; |
92 | 92 |
StatusMap* _status; |
93 | 93 |
|
94 | 94 |
EarMap* _ear; |
95 | 95 |
|
96 | 96 |
IntNodeMap* _blossom_set_index; |
97 | 97 |
BlossomSet* _blossom_set; |
98 | 98 |
NodeIntMap* _blossom_rep; |
99 | 99 |
|
100 | 100 |
IntNodeMap* _tree_set_index; |
101 | 101 |
TreeSet* _tree_set; |
102 | 102 |
|
103 | 103 |
NodeQueue _node_queue; |
104 | 104 |
int _process, _postpone, _last; |
105 | 105 |
|
106 | 106 |
int _node_num; |
107 | 107 |
|
108 | 108 |
private: |
109 | 109 |
|
110 | 110 |
void createStructures() { |
111 | 111 |
_node_num = countNodes(_graph); |
112 | 112 |
if (!_matching) { |
113 | 113 |
_matching = new MatchingMap(_graph); |
114 | 114 |
} |
115 | 115 |
if (!_status) { |
116 | 116 |
_status = new StatusMap(_graph); |
117 | 117 |
} |
118 | 118 |
if (!_ear) { |
119 | 119 |
_ear = new EarMap(_graph); |
120 | 120 |
} |
121 | 121 |
if (!_blossom_set) { |
122 | 122 |
_blossom_set_index = new IntNodeMap(_graph); |
123 | 123 |
_blossom_set = new BlossomSet(*_blossom_set_index); |
124 | 124 |
} |
125 | 125 |
if (!_blossom_rep) { |
126 | 126 |
_blossom_rep = new NodeIntMap(_node_num); |
127 | 127 |
} |
128 | 128 |
if (!_tree_set) { |
129 | 129 |
_tree_set_index = new IntNodeMap(_graph); |
130 | 130 |
_tree_set = new TreeSet(*_tree_set_index); |
131 | 131 |
} |
132 | 132 |
_node_queue.resize(_node_num); |
133 | 133 |
} |
134 | 134 |
|
135 | 135 |
void destroyStructures() { |
136 | 136 |
if (_matching) { |
137 | 137 |
delete _matching; |
138 | 138 |
} |
139 | 139 |
if (_status) { |
140 | 140 |
delete _status; |
141 | 141 |
} |
142 | 142 |
if (_ear) { |
143 | 143 |
delete _ear; |
144 | 144 |
} |
145 | 145 |
if (_blossom_set) { |
146 | 146 |
delete _blossom_set; |
147 | 147 |
delete _blossom_set_index; |
148 | 148 |
} |
149 | 149 |
if (_blossom_rep) { |
150 | 150 |
delete _blossom_rep; |
151 | 151 |
} |
152 | 152 |
if (_tree_set) { |
153 | 153 |
delete _tree_set_index; |
154 | 154 |
delete _tree_set; |
155 | 155 |
} |
156 | 156 |
} |
157 | 157 |
|
158 | 158 |
void processDense(const Node& n) { |
159 | 159 |
_process = _postpone = _last = 0; |
160 | 160 |
_node_queue[_last++] = n; |
161 | 161 |
|
162 | 162 |
while (_process != _last) { |
163 | 163 |
Node u = _node_queue[_process++]; |
164 | 164 |
for (OutArcIt a(_graph, u); a != INVALID; ++a) { |
165 | 165 |
Node v = _graph.target(a); |
166 | 166 |
if ((*_status)[v] == MATCHED) { |
167 | 167 |
extendOnArc(a); |
168 | 168 |
} else if ((*_status)[v] == UNMATCHED) { |
169 | 169 |
augmentOnArc(a); |
170 | 170 |
return; |
171 | 171 |
} |
172 | 172 |
} |
173 | 173 |
} |
174 | 174 |
|
175 | 175 |
while (_postpone != _last) { |
176 | 176 |
Node u = _node_queue[_postpone++]; |
177 | 177 |
|
178 | 178 |
for (OutArcIt a(_graph, u); a != INVALID ; ++a) { |
179 | 179 |
Node v = _graph.target(a); |
180 | 180 |
|
181 | 181 |
if ((*_status)[v] == EVEN) { |
182 | 182 |
if (_blossom_set->find(u) != _blossom_set->find(v)) { |
183 | 183 |
shrinkOnEdge(a); |
184 | 184 |
} |
185 | 185 |
} |
186 | 186 |
|
187 | 187 |
while (_process != _last) { |
188 | 188 |
Node w = _node_queue[_process++]; |
189 | 189 |
for (OutArcIt b(_graph, w); b != INVALID; ++b) { |
190 | 190 |
Node x = _graph.target(b); |
191 | 191 |
if ((*_status)[x] == MATCHED) { |
192 | 192 |
extendOnArc(b); |
193 | 193 |
} else if ((*_status)[x] == UNMATCHED) { |
194 | 194 |
augmentOnArc(b); |
195 | 195 |
return; |
196 | 196 |
} |
197 | 197 |
} |
198 | 198 |
} |
199 | 199 |
} |
200 | 200 |
} |
201 | 201 |
} |
202 | 202 |
|
203 | 203 |
void processSparse(const Node& n) { |
204 | 204 |
_process = _last = 0; |
205 | 205 |
_node_queue[_last++] = n; |
206 | 206 |
while (_process != _last) { |
207 | 207 |
Node u = _node_queue[_process++]; |
208 | 208 |
for (OutArcIt a(_graph, u); a != INVALID; ++a) { |
209 | 209 |
Node v = _graph.target(a); |
210 | 210 |
|
211 | 211 |
if ((*_status)[v] == EVEN) { |
212 | 212 |
if (_blossom_set->find(u) != _blossom_set->find(v)) { |
213 | 213 |
shrinkOnEdge(a); |
214 | 214 |
} |
215 | 215 |
} else if ((*_status)[v] == MATCHED) { |
216 | 216 |
extendOnArc(a); |
217 | 217 |
} else if ((*_status)[v] == UNMATCHED) { |
218 | 218 |
augmentOnArc(a); |
219 | 219 |
return; |
220 | 220 |
} |
221 | 221 |
} |
222 | 222 |
} |
223 | 223 |
} |
224 | 224 |
|
225 | 225 |
void shrinkOnEdge(const Edge& e) { |
226 | 226 |
Node nca = INVALID; |
227 | 227 |
|
228 | 228 |
{ |
229 | 229 |
std::set<Node> left_set, right_set; |
230 | 230 |
|
231 | 231 |
Node left = (*_blossom_rep)[_blossom_set->find(_graph.u(e))]; |
232 | 232 |
left_set.insert(left); |
233 | 233 |
|
234 | 234 |
Node right = (*_blossom_rep)[_blossom_set->find(_graph.v(e))]; |
235 | 235 |
right_set.insert(right); |
236 | 236 |
|
237 | 237 |
while (true) { |
238 | 238 |
if ((*_matching)[left] == INVALID) break; |
239 | 239 |
left = _graph.target((*_matching)[left]); |
240 | 240 |
left = (*_blossom_rep)[_blossom_set-> |
241 | 241 |
find(_graph.target((*_ear)[left]))]; |
242 | 242 |
if (right_set.find(left) != right_set.end()) { |
243 | 243 |
nca = left; |
244 | 244 |
break; |
245 | 245 |
} |
246 | 246 |
left_set.insert(left); |
247 | 247 |
|
248 | 248 |
if ((*_matching)[right] == INVALID) break; |
249 | 249 |
right = _graph.target((*_matching)[right]); |
250 | 250 |
right = (*_blossom_rep)[_blossom_set-> |
251 | 251 |
find(_graph.target((*_ear)[right]))]; |
252 | 252 |
if (left_set.find(right) != left_set.end()) { |
253 | 253 |
nca = right; |
254 | 254 |
break; |
255 | 255 |
} |
256 | 256 |
right_set.insert(right); |
257 | 257 |
} |
258 | 258 |
|
259 | 259 |
if (nca == INVALID) { |
260 | 260 |
if ((*_matching)[left] == INVALID) { |
261 | 261 |
nca = right; |
262 | 262 |
while (left_set.find(nca) == left_set.end()) { |
263 | 263 |
nca = _graph.target((*_matching)[nca]); |
264 | 264 |
nca =(*_blossom_rep)[_blossom_set-> |
265 | 265 |
find(_graph.target((*_ear)[nca]))]; |
266 | 266 |
} |
267 | 267 |
} else { |
268 | 268 |
nca = left; |
269 | 269 |
while (right_set.find(nca) == right_set.end()) { |
270 | 270 |
nca = _graph.target((*_matching)[nca]); |
271 | 271 |
nca = (*_blossom_rep)[_blossom_set-> |
272 | 272 |
find(_graph.target((*_ear)[nca]))]; |
273 | 273 |
} |
274 | 274 |
} |
275 | 275 |
} |
276 | 276 |
} |
277 | 277 |
|
278 | 278 |
{ |
279 | 279 |
|
280 | 280 |
Node node = _graph.u(e); |
281 | 281 |
Arc arc = _graph.direct(e, true); |
282 | 282 |
Node base = (*_blossom_rep)[_blossom_set->find(node)]; |
283 | 283 |
|
284 | 284 |
while (base != nca) { |
285 |
_ear->set(node, arc); |
|
285 |
(*_ear)[node] = arc; |
|
286 | 286 |
|
287 | 287 |
Node n = node; |
288 | 288 |
while (n != base) { |
289 | 289 |
n = _graph.target((*_matching)[n]); |
290 | 290 |
Arc a = (*_ear)[n]; |
291 | 291 |
n = _graph.target(a); |
292 |
_ear->set(n, _graph.oppositeArc(a)); |
|
292 |
(*_ear)[n] = _graph.oppositeArc(a); |
|
293 | 293 |
} |
294 | 294 |
node = _graph.target((*_matching)[base]); |
295 | 295 |
_tree_set->erase(base); |
296 | 296 |
_tree_set->erase(node); |
297 | 297 |
_blossom_set->insert(node, _blossom_set->find(base)); |
298 |
_status->set(node, EVEN); |
|
298 |
(*_status)[node] = EVEN; |
|
299 | 299 |
_node_queue[_last++] = node; |
300 | 300 |
arc = _graph.oppositeArc((*_ear)[node]); |
301 | 301 |
node = _graph.target((*_ear)[node]); |
302 | 302 |
base = (*_blossom_rep)[_blossom_set->find(node)]; |
303 | 303 |
_blossom_set->join(_graph.target(arc), base); |
304 | 304 |
} |
305 | 305 |
} |
306 | 306 |
|
307 |
_blossom_rep->set(_blossom_set->find(nca), nca); |
|
307 |
(*_blossom_rep)[_blossom_set->find(nca)] = nca; |
|
308 | 308 |
|
309 | 309 |
{ |
310 | 310 |
|
311 | 311 |
Node node = _graph.v(e); |
312 | 312 |
Arc arc = _graph.direct(e, false); |
313 | 313 |
Node base = (*_blossom_rep)[_blossom_set->find(node)]; |
314 | 314 |
|
315 | 315 |
while (base != nca) { |
316 |
_ear->set(node, arc); |
|
316 |
(*_ear)[node] = arc; |
|
317 | 317 |
|
318 | 318 |
Node n = node; |
319 | 319 |
while (n != base) { |
320 | 320 |
n = _graph.target((*_matching)[n]); |
321 | 321 |
Arc a = (*_ear)[n]; |
322 | 322 |
n = _graph.target(a); |
323 |
_ear->set(n, _graph.oppositeArc(a)); |
|
323 |
(*_ear)[n] = _graph.oppositeArc(a); |
|
324 | 324 |
} |
325 | 325 |
node = _graph.target((*_matching)[base]); |
326 | 326 |
_tree_set->erase(base); |
327 | 327 |
_tree_set->erase(node); |
328 | 328 |
_blossom_set->insert(node, _blossom_set->find(base)); |
329 |
_status->set(node, EVEN); |
|
329 |
(*_status)[node] = EVEN; |
|
330 | 330 |
_node_queue[_last++] = node; |
331 | 331 |
arc = _graph.oppositeArc((*_ear)[node]); |
332 | 332 |
node = _graph.target((*_ear)[node]); |
333 | 333 |
base = (*_blossom_rep)[_blossom_set->find(node)]; |
334 | 334 |
_blossom_set->join(_graph.target(arc), base); |
335 | 335 |
} |
336 | 336 |
} |
337 | 337 |
|
338 |
_blossom_rep->set(_blossom_set->find(nca), nca); |
|
338 |
(*_blossom_rep)[_blossom_set->find(nca)] = nca; |
|
339 | 339 |
} |
340 | 340 |
|
341 | 341 |
|
342 | 342 |
|
343 | 343 |
void extendOnArc(const Arc& a) { |
344 | 344 |
Node base = _graph.source(a); |
345 | 345 |
Node odd = _graph.target(a); |
346 | 346 |
|
347 |
_ear->set(odd, _graph.oppositeArc(a)); |
|
347 |
(*_ear)[odd] = _graph.oppositeArc(a); |
|
348 | 348 |
Node even = _graph.target((*_matching)[odd]); |
349 |
_blossom_rep->set(_blossom_set->insert(even), even); |
|
350 |
_status->set(odd, ODD); |
|
351 |
_status->set(even, EVEN); |
|
349 |
(*_blossom_rep)[_blossom_set->insert(even)] = even; |
|
350 |
(*_status)[odd] = ODD; |
|
351 |
(*_status)[even] = EVEN; |
|
352 | 352 |
int tree = _tree_set->find((*_blossom_rep)[_blossom_set->find(base)]); |
353 | 353 |
_tree_set->insert(odd, tree); |
354 | 354 |
_tree_set->insert(even, tree); |
355 | 355 |
_node_queue[_last++] = even; |
356 | 356 |
|
357 | 357 |
} |
358 | 358 |
|
359 | 359 |
void augmentOnArc(const Arc& a) { |
360 | 360 |
Node even = _graph.source(a); |
361 | 361 |
Node odd = _graph.target(a); |
362 | 362 |
|
363 | 363 |
int tree = _tree_set->find((*_blossom_rep)[_blossom_set->find(even)]); |
364 | 364 |
|
365 |
_matching->set(odd, _graph.oppositeArc(a)); |
|
366 |
_status->set(odd, MATCHED); |
|
365 |
(*_matching)[odd] = _graph.oppositeArc(a); |
|
366 |
(*_status)[odd] = MATCHED; |
|
367 | 367 |
|
368 | 368 |
Arc arc = (*_matching)[even]; |
369 |
_matching->set(even, a); |
|
369 |
(*_matching)[even] = a; |
|
370 | 370 |
|
371 | 371 |
while (arc != INVALID) { |
372 | 372 |
odd = _graph.target(arc); |
373 | 373 |
arc = (*_ear)[odd]; |
374 | 374 |
even = _graph.target(arc); |
375 |
_matching->set(odd, arc); |
|
375 |
(*_matching)[odd] = arc; |
|
376 | 376 |
arc = (*_matching)[even]; |
377 |
_matching->set(even, _graph.oppositeArc((*_matching)[odd])); |
|
377 |
(*_matching)[even] = _graph.oppositeArc((*_matching)[odd]); |
|
378 | 378 |
} |
379 | 379 |
|
380 | 380 |
for (typename TreeSet::ItemIt it(*_tree_set, tree); |
381 | 381 |
it != INVALID; ++it) { |
382 | 382 |
if ((*_status)[it] == ODD) { |
383 |
_status->set(it, MATCHED); |
|
383 |
(*_status)[it] = MATCHED; |
|
384 | 384 |
} else { |
385 | 385 |
int blossom = _blossom_set->find(it); |
386 | 386 |
for (typename BlossomSet::ItemIt jt(*_blossom_set, blossom); |
387 | 387 |
jt != INVALID; ++jt) { |
388 |
_status->set(jt, MATCHED); |
|
388 |
(*_status)[jt] = MATCHED; |
|
389 | 389 |
} |
390 | 390 |
_blossom_set->eraseClass(blossom); |
391 | 391 |
} |
392 | 392 |
} |
393 | 393 |
_tree_set->eraseClass(tree); |
394 | 394 |
|
395 | 395 |
} |
396 | 396 |
|
397 | 397 |
public: |
398 | 398 |
|
399 | 399 |
/// \brief Constructor |
400 | 400 |
/// |
401 | 401 |
/// Constructor. |
402 | 402 |
MaxMatching(const Graph& graph) |
403 | 403 |
: _graph(graph), _matching(0), _status(0), _ear(0), |
404 | 404 |
_blossom_set_index(0), _blossom_set(0), _blossom_rep(0), |
405 | 405 |
_tree_set_index(0), _tree_set(0) {} |
406 | 406 |
|
407 | 407 |
~MaxMatching() { |
408 | 408 |
destroyStructures(); |
409 | 409 |
} |
410 | 410 |
|
411 | 411 |
/// \name Execution control |
412 | 412 |
/// The simplest way to execute the algorithm is to use the |
413 | 413 |
/// \c run() member function. |
414 | 414 |
/// \n |
415 | 415 |
|
416 | 416 |
/// If you need better control over the execution, you must call |
417 | 417 |
/// \ref init(), \ref greedyInit() or \ref matchingInit() |
418 | 418 |
/// functions first, then you can start the algorithm with the \ref |
419 | 419 |
/// startSparse() or startDense() functions. |
420 | 420 |
|
421 | 421 |
///@{ |
422 | 422 |
|
423 | 423 |
/// \brief Sets the current matching to the empty matching. |
424 | 424 |
/// |
425 | 425 |
/// Sets the current matching to the empty matching. |
426 | 426 |
/// |
427 | 427 |
void init() { |
428 | 428 |
createStructures(); |
429 | 429 |
for(NodeIt n(_graph); n != INVALID; ++n) { |
430 |
_matching->set(n, INVALID); |
|
431 |
_status->set(n, UNMATCHED); |
|
430 |
(*_matching)[n] = INVALID; |
|
431 |
(*_status)[n] = UNMATCHED; |
|
432 | 432 |
} |
433 | 433 |
} |
434 | 434 |
|
435 | 435 |
///\brief Finds an initial matching in a greedy way. |
436 | 436 |
/// |
437 | 437 |
///It finds an initial matching in a greedy way. |
438 | 438 |
void greedyInit() { |
439 | 439 |
createStructures(); |
440 | 440 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
441 |
_matching->set(n, INVALID); |
|
442 |
_status->set(n, UNMATCHED); |
|
441 |
(*_matching)[n] = INVALID; |
|
442 |
(*_status)[n] = UNMATCHED; |
|
443 | 443 |
} |
444 | 444 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
445 | 445 |
if ((*_matching)[n] == INVALID) { |
446 | 446 |
for (OutArcIt a(_graph, n); a != INVALID ; ++a) { |
447 | 447 |
Node v = _graph.target(a); |
448 | 448 |
if ((*_matching)[v] == INVALID && v != n) { |
449 |
_matching->set(n, a); |
|
450 |
_status->set(n, MATCHED); |
|
451 |
_matching->set(v, _graph.oppositeArc(a)); |
|
452 |
_status->set(v, MATCHED); |
|
449 |
(*_matching)[n] = a; |
|
450 |
(*_status)[n] = MATCHED; |
|
451 |
(*_matching)[v] = _graph.oppositeArc(a); |
|
452 |
(*_status)[v] = MATCHED; |
|
453 | 453 |
break; |
454 | 454 |
} |
455 | 455 |
} |
456 | 456 |
} |
457 | 457 |
} |
458 | 458 |
} |
459 | 459 |
|
460 | 460 |
|
461 | 461 |
/// \brief Initializes the matching from a map containing a matching. |
462 | 462 |
/// |
463 | 463 |
/// Initialize the matching from a \c bool valued \c Edge map. This |
464 | 464 |
/// map must have the property that there are no two incident edges |
465 | 465 |
/// with true value, i.e. it contains a matching. |
466 | 466 |
/// \return \c true if the map contains a matching. |
467 | 467 |
template <typename MatchingMap> |
468 | 468 |
bool matchingInit(const MatchingMap& matching) { |
469 | 469 |
createStructures(); |
470 | 470 |
|
471 | 471 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
472 |
_matching->set(n, INVALID); |
|
473 |
_status->set(n, UNMATCHED); |
|
472 |
(*_matching)[n] = INVALID; |
|
473 |
(*_status)[n] = UNMATCHED; |
|
474 | 474 |
} |
475 | 475 |
for(EdgeIt e(_graph); e!=INVALID; ++e) { |
476 | 476 |
if (matching[e]) { |
477 | 477 |
|
478 | 478 |
Node u = _graph.u(e); |
479 | 479 |
if ((*_matching)[u] != INVALID) return false; |
480 |
_matching->set(u, _graph.direct(e, true)); |
|
481 |
_status->set(u, MATCHED); |
|
480 |
(*_matching)[u] = _graph.direct(e, true); |
|
481 |
(*_status)[u] = MATCHED; |
|
482 | 482 |
|
483 | 483 |
Node v = _graph.v(e); |
484 | 484 |
if ((*_matching)[v] != INVALID) return false; |
485 |
_matching->set(v, _graph.direct(e, false)); |
|
486 |
_status->set(v, MATCHED); |
|
485 |
(*_matching)[v] = _graph.direct(e, false); |
|
486 |
(*_status)[v] = MATCHED; |
|
487 | 487 |
} |
488 | 488 |
} |
489 | 489 |
return true; |
490 | 490 |
} |
491 | 491 |
|
492 | 492 |
/// \brief Starts Edmonds' algorithm |
493 | 493 |
/// |
494 | 494 |
/// It runs the original Edmonds' algorithm. |
495 | 495 |
void startSparse() { |
496 | 496 |
for(NodeIt n(_graph); n != INVALID; ++n) { |
497 | 497 |
if ((*_status)[n] == UNMATCHED) { |
498 | 498 |
(*_blossom_rep)[_blossom_set->insert(n)] = n; |
499 | 499 |
_tree_set->insert(n); |
500 |
_status->set(n, EVEN); |
|
500 |
(*_status)[n] = EVEN; |
|
501 | 501 |
processSparse(n); |
502 | 502 |
} |
503 | 503 |
} |
504 | 504 |
} |
505 | 505 |
|
506 | 506 |
/// \brief Starts Edmonds' algorithm. |
507 | 507 |
/// |
508 | 508 |
/// It runs Edmonds' algorithm with a heuristic of postponing |
509 | 509 |
/// shrinks, therefore resulting in a faster algorithm for dense graphs. |
510 | 510 |
void startDense() { |
511 | 511 |
for(NodeIt n(_graph); n != INVALID; ++n) { |
512 | 512 |
if ((*_status)[n] == UNMATCHED) { |
513 | 513 |
(*_blossom_rep)[_blossom_set->insert(n)] = n; |
514 | 514 |
_tree_set->insert(n); |
515 |
_status->set(n, EVEN); |
|
515 |
(*_status)[n] = EVEN; |
|
516 | 516 |
processDense(n); |
517 | 517 |
} |
518 | 518 |
} |
519 | 519 |
} |
520 | 520 |
|
521 | 521 |
|
522 | 522 |
/// \brief Runs Edmonds' algorithm. |
523 | 523 |
/// |
524 | 524 |
/// Runs Edmonds' algorithm for sparse graphs (<tt>m<2*n</tt>) |
525 | 525 |
/// or Edmonds' algorithm with a heuristic of |
526 | 526 |
/// postponing shrinks for dense graphs. |
527 | 527 |
void run() { |
528 | 528 |
if (countEdges(_graph) < 2 * countNodes(_graph)) { |
529 | 529 |
greedyInit(); |
530 | 530 |
startSparse(); |
531 | 531 |
} else { |
532 | 532 |
init(); |
533 | 533 |
startDense(); |
534 | 534 |
} |
535 | 535 |
} |
536 | 536 |
|
537 | 537 |
/// @} |
538 | 538 |
|
539 | 539 |
/// \name Primal solution |
540 | 540 |
/// Functions to get the primal solution, i.e. the matching. |
541 | 541 |
|
542 | 542 |
/// @{ |
543 | 543 |
|
544 | 544 |
///\brief Returns the size of the current matching. |
545 | 545 |
/// |
546 | 546 |
///Returns the size of the current matching. After \ref |
547 | 547 |
///run() it returns the size of the maximum matching in the graph. |
548 | 548 |
int matchingSize() const { |
549 | 549 |
int size = 0; |
550 | 550 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
551 | 551 |
if ((*_matching)[n] != INVALID) { |
552 | 552 |
++size; |
553 | 553 |
} |
554 | 554 |
} |
555 | 555 |
return size / 2; |
556 | 556 |
} |
557 | 557 |
|
558 | 558 |
/// \brief Returns true when the edge is in the matching. |
559 | 559 |
/// |
560 | 560 |
/// Returns true when the edge is in the matching. |
561 | 561 |
bool matching(const Edge& edge) const { |
562 | 562 |
return edge == (*_matching)[_graph.u(edge)]; |
563 | 563 |
} |
564 | 564 |
|
565 | 565 |
/// \brief Returns the matching edge incident to the given node. |
566 | 566 |
/// |
567 | 567 |
/// Returns the matching edge of a \c node in the current matching or |
568 | 568 |
/// INVALID if the \c node is not covered by the current matching. |
569 | 569 |
Arc matching(const Node& n) const { |
570 | 570 |
return (*_matching)[n]; |
571 | 571 |
} |
572 | 572 |
|
573 | 573 |
///\brief Returns the mate of a node in the actual matching. |
574 | 574 |
/// |
575 | 575 |
///Returns the mate of a \c node in the current matching or |
576 | 576 |
///INVALID if the \c node is not covered by the current matching. |
577 | 577 |
Node mate(const Node& n) const { |
578 | 578 |
return (*_matching)[n] != INVALID ? |
579 | 579 |
_graph.target((*_matching)[n]) : INVALID; |
580 | 580 |
} |
581 | 581 |
|
582 | 582 |
/// @} |
583 | 583 |
|
584 | 584 |
/// \name Dual solution |
585 | 585 |
/// Functions to get the dual solution, i.e. the decomposition. |
586 | 586 |
|
587 | 587 |
/// @{ |
588 | 588 |
|
589 | 589 |
/// \brief Returns the class of the node in the Edmonds-Gallai |
590 | 590 |
/// decomposition. |
591 | 591 |
/// |
592 | 592 |
/// Returns the class of the node in the Edmonds-Gallai |
593 | 593 |
/// decomposition. |
594 | 594 |
Status decomposition(const Node& n) const { |
595 | 595 |
return (*_status)[n]; |
596 | 596 |
} |
597 | 597 |
|
598 | 598 |
/// \brief Returns true when the node is in the barrier. |
599 | 599 |
/// |
600 | 600 |
/// Returns true when the node is in the barrier. |
601 | 601 |
bool barrier(const Node& n) const { |
602 | 602 |
return (*_status)[n] == ODD; |
603 | 603 |
} |
604 | 604 |
|
605 | 605 |
/// @} |
606 | 606 |
|
607 | 607 |
}; |
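[Editorial note: the following sketch is not part of the changeset. It shows how the MaxMatching interface above (run(), matchingSize(), mate(), decomposition()) fits together; the header path is inferred from the include guard LEMON_MAX_MATCHING_H, and the graph and variable names are illustrative only.]

  #include <lemon/list_graph.h>
  #include <lemon/max_matching.h>  // header name inferred from the include guard above

  int main() {
    using namespace lemon;

    // A 4-cycle; its maximum matching consists of two edges.
    ListGraph g;
    ListGraph::Node n[4];
    for (int i = 0; i < 4; ++i) n[i] = g.addNode();
    for (int i = 0; i < 4; ++i) g.addEdge(n[i], n[(i + 1) % 4]);

    MaxMatching<ListGraph> mm(g);
    mm.run();  // greedyInit()+startSparse() or init()+startDense(), as documented above

    int size = mm.matchingSize();                 // 2 on this graph
    for (ListGraph::NodeIt v(g); v != INVALID; ++v) {
      ListGraph::Node m = mm.mate(v);             // INVALID if v is unmatched
      MaxMatching<ListGraph>::Status s = mm.decomposition(v);  // Gallai-Edmonds class
      (void)m; (void)s;                           // silence unused-variable warnings
    }
    return size == 2 ? 0 : 1;
  }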
608 | 608 |
|
609 | 609 |
/// \ingroup matching |
610 | 610 |
/// |
611 | 611 |
/// \brief Weighted matching in general graphs |
612 | 612 |
/// |
613 | 613 |
/// This class provides an efficient implementation of Edmonds' |
614 | 614 |
/// maximum weighted matching algorithm. The implementation is based |
615 | 615 |
/// on extensive use of priority queues and provides |
616 | 616 |
/// \f$O(nm\log n)\f$ time complexity. |
617 | 617 |
/// |
618 | 618 |
/// The maximum weighted matching problem is to find undirected |
619 | 619 |
/// edges in the graph with maximum overall weight such that no two of |
620 | 620 |
/// them share their ends. The problem can be formulated with the |
621 | 621 |
/// following linear program. |
622 | 622 |
/// \f[ \sum_{e \in \delta(u)}x_e \le 1 \quad \forall u\in V\f] |
623 | 623 |
/** \f[ \sum_{e \in \gamma(B)}x_e \le \frac{\vert B \vert - 1}{2} |
624 | 624 |
\quad \forall B\in\mathcal{O}\f] */ |
625 | 625 |
/// \f[x_e \ge 0\quad \forall e\in E\f] |
626 | 626 |
/// \f[\max \sum_{e\in E}x_ew_e\f] |
627 | 627 |
/// where \f$\delta(X)\f$ is the set of edges incident to a node in |
628 | 628 |
/// \f$X\f$, \f$\gamma(X)\f$ is the set of edges with both ends in |
629 | 629 |
/// \f$X\f$ and \f$\mathcal{O}\f$ is the set of odd cardinality |
630 | 630 |
/// subsets of the nodes. |
631 | 631 |
/// |
632 | 632 |
/// The algorithm calculates an optimal matching and a proof of the |
633 | 633 |
/// optimality. The solution of the dual problem can be used to check |
634 | 634 |
/// the result of the algorithm. The dual linear problem is the |
635 | 635 |
/** \f[ y_u + y_v + \sum_{B \in \mathcal{O}, uv \in \gamma(B)} |
636 | 636 |
z_B \ge w_{uv} \quad \forall uv\in E\f] */ |
637 | 637 |
/// \f[y_u \ge 0 \quad \forall u \in V\f] |
638 | 638 |
/// \f[z_B \ge 0 \quad \forall B \in \mathcal{O}\f] |
639 | 639 |
/** \f[\min \sum_{u \in V}y_u + \sum_{B \in \mathcal{O}} |
640 | 640 |
\frac{\vert B \vert - 1}{2}z_B\f] */ |
641 | 641 |
/// |
642 | 642 |
/// The algorithm can be executed with \c run() or the \c init() and |
643 | 643 |
/// then the \c start() member functions. After it the matching can |
644 | 644 |
/// be asked with \c matching() or mate() functions. The dual |
645 | 645 |
/// solution can be get with \c nodeValue(), \c blossomNum() and \c |
646 | 646 |
/// blossomValue() members and \ref MaxWeightedMatching::BlossomIt |
647 | 647 |
/// "BlossomIt" nested class, which is able to iterate on the nodes |
648 | 648 |
/// of a blossom. If the value type is integral then the dual |
649 | 649 |
/// solution is multiplied by \ref MaxWeightedMatching::dualScale "4". |
650 | 650 |
template <typename GR, |
651 | 651 |
typename WM = typename GR::template EdgeMap<int> > |
652 | 652 |
class MaxWeightedMatching { |
653 | 653 |
public: |
654 | 654 |
|
655 | 655 |
///\e |
656 | 656 |
typedef GR Graph; |
657 | 657 |
///\e |
658 | 658 |
typedef WM WeightMap; |
659 | 659 |
///\e |
660 | 660 |
typedef typename WeightMap::Value Value; |
661 | 661 |
|
662 | 662 |
/// \brief Scaling factor for dual solution |
663 | 663 |
/// |
664 | 664 |
/// Scaling factor for the dual solution; it is equal to 4 or 1 |
665 | 665 |
/// according to the value type. |
666 | 666 |
static const int dualScale = |
667 | 667 |
std::numeric_limits<Value>::is_integer ? 4 : 1; |
668 | 668 |
|
669 | 669 |
typedef typename Graph::template NodeMap<typename Graph::Arc> |
670 | 670 |
MatchingMap; |
671 | 671 |
|
672 | 672 |
private: |
673 | 673 |
|
674 | 674 |
TEMPLATE_GRAPH_TYPEDEFS(Graph); |
675 | 675 |
|
676 | 676 |
typedef typename Graph::template NodeMap<Value> NodePotential; |
677 | 677 |
typedef std::vector<Node> BlossomNodeList; |
678 | 678 |
|
679 | 679 |
struct BlossomVariable { |
680 | 680 |
int begin, end; |
681 | 681 |
Value value; |
682 | 682 |
|
683 | 683 |
BlossomVariable(int _begin, int _end, Value _value) |
684 | 684 |
: begin(_begin), end(_end), value(_value) {} |
685 | 685 |
|
686 | 686 |
}; |
687 | 687 |
|
688 | 688 |
typedef std::vector<BlossomVariable> BlossomPotential; |
689 | 689 |
|
690 | 690 |
const Graph& _graph; |
691 | 691 |
const WeightMap& _weight; |
692 | 692 |
|
693 | 693 |
MatchingMap* _matching; |
694 | 694 |
|
695 | 695 |
NodePotential* _node_potential; |
696 | 696 |
|
697 | 697 |
BlossomPotential _blossom_potential; |
698 | 698 |
BlossomNodeList _blossom_node_list; |
699 | 699 |
|
700 | 700 |
int _node_num; |
701 | 701 |
int _blossom_num; |
702 | 702 |
|
703 | 703 |
typedef RangeMap<int> IntIntMap; |
704 | 704 |
|
705 | 705 |
enum Status { |
706 | 706 |
EVEN = -1, MATCHED = 0, ODD = 1, UNMATCHED = -2 |
707 | 707 |
}; |
708 | 708 |
|
709 | 709 |
typedef HeapUnionFind<Value, IntNodeMap> BlossomSet; |
710 | 710 |
struct BlossomData { |
711 | 711 |
int tree; |
712 | 712 |
Status status; |
713 | 713 |
Arc pred, next; |
714 | 714 |
Value pot, offset; |
715 | 715 |
Node base; |
716 | 716 |
}; |
717 | 717 |
|
718 | 718 |
IntNodeMap *_blossom_index; |
719 | 719 |
BlossomSet *_blossom_set; |
720 | 720 |
RangeMap<BlossomData>* _blossom_data; |
721 | 721 |
|
722 | 722 |
IntNodeMap *_node_index; |
723 | 723 |
IntArcMap *_node_heap_index; |
724 | 724 |
|
725 | 725 |
struct NodeData { |
726 | 726 |
|
727 | 727 |
NodeData(IntArcMap& node_heap_index) |
728 | 728 |
: heap(node_heap_index) {} |
729 | 729 |
|
730 | 730 |
int blossom; |
731 | 731 |
Value pot; |
732 | 732 |
BinHeap<Value, IntArcMap> heap; |
733 | 733 |
std::map<int, Arc> heap_index; |
734 | 734 |
|
735 | 735 |
int tree; |
736 | 736 |
}; |
737 | 737 |
|
738 | 738 |
RangeMap<NodeData>* _node_data; |
739 | 739 |
|
740 | 740 |
typedef ExtendFindEnum<IntIntMap> TreeSet; |
741 | 741 |
|
742 | 742 |
IntIntMap *_tree_set_index; |
743 | 743 |
TreeSet *_tree_set; |
744 | 744 |
|
745 | 745 |
IntNodeMap *_delta1_index; |
746 | 746 |
BinHeap<Value, IntNodeMap> *_delta1; |
747 | 747 |
|
748 | 748 |
IntIntMap *_delta2_index; |
749 | 749 |
BinHeap<Value, IntIntMap> *_delta2; |
750 | 750 |
|
751 | 751 |
IntEdgeMap *_delta3_index; |
752 | 752 |
BinHeap<Value, IntEdgeMap> *_delta3; |
753 | 753 |
|
754 | 754 |
IntIntMap *_delta4_index; |
755 | 755 |
BinHeap<Value, IntIntMap> *_delta4; |
756 | 756 |
|
757 | 757 |
Value _delta_sum; |
758 | 758 |
|
759 | 759 |
void createStructures() { |
760 | 760 |
_node_num = countNodes(_graph); |
761 | 761 |
_blossom_num = _node_num * 3 / 2; |
762 | 762 |
|
763 | 763 |
if (!_matching) { |
764 | 764 |
_matching = new MatchingMap(_graph); |
765 | 765 |
} |
766 | 766 |
if (!_node_potential) { |
767 | 767 |
_node_potential = new NodePotential(_graph); |
768 | 768 |
} |
769 | 769 |
if (!_blossom_set) { |
770 | 770 |
_blossom_index = new IntNodeMap(_graph); |
771 | 771 |
_blossom_set = new BlossomSet(*_blossom_index); |
772 | 772 |
_blossom_data = new RangeMap<BlossomData>(_blossom_num); |
773 | 773 |
} |
774 | 774 |
|
775 | 775 |
if (!_node_index) { |
776 | 776 |
_node_index = new IntNodeMap(_graph); |
777 | 777 |
_node_heap_index = new IntArcMap(_graph); |
778 | 778 |
_node_data = new RangeMap<NodeData>(_node_num, |
779 | 779 |
NodeData(*_node_heap_index)); |
780 | 780 |
} |
781 | 781 |
|
782 | 782 |
if (!_tree_set) { |
783 | 783 |
_tree_set_index = new IntIntMap(_blossom_num); |
784 | 784 |
_tree_set = new TreeSet(*_tree_set_index); |
785 | 785 |
} |
786 | 786 |
if (!_delta1) { |
787 | 787 |
_delta1_index = new IntNodeMap(_graph); |
788 | 788 |
_delta1 = new BinHeap<Value, IntNodeMap>(*_delta1_index); |
789 | 789 |
} |
790 | 790 |
if (!_delta2) { |
791 | 791 |
_delta2_index = new IntIntMap(_blossom_num); |
792 | 792 |
_delta2 = new BinHeap<Value, IntIntMap>(*_delta2_index); |
793 | 793 |
} |
794 | 794 |
if (!_delta3) { |
795 | 795 |
_delta3_index = new IntEdgeMap(_graph); |
796 | 796 |
_delta3 = new BinHeap<Value, IntEdgeMap>(*_delta3_index); |
797 | 797 |
} |
798 | 798 |
if (!_delta4) { |
799 | 799 |
_delta4_index = new IntIntMap(_blossom_num); |
800 | 800 |
_delta4 = new BinHeap<Value, IntIntMap>(*_delta4_index); |
801 | 801 |
} |
802 | 802 |
} |
803 | 803 |
|
804 | 804 |
void destroyStructures() { |
805 | 805 |
_node_num = countNodes(_graph); |
806 | 806 |
_blossom_num = _node_num * 3 / 2; |
807 | 807 |
|
808 | 808 |
if (_matching) { |
809 | 809 |
delete _matching; |
810 | 810 |
} |
811 | 811 |
if (_node_potential) { |
812 | 812 |
delete _node_potential; |
813 | 813 |
} |
814 | 814 |
if (_blossom_set) { |
815 | 815 |
delete _blossom_index; |
816 | 816 |
delete _blossom_set; |
817 | 817 |
delete _blossom_data; |
818 | 818 |
} |
819 | 819 |
|
820 | 820 |
if (_node_index) { |
821 | 821 |
delete _node_index; |
822 | 822 |
delete _node_heap_index; |
823 | 823 |
delete _node_data; |
824 | 824 |
} |
825 | 825 |
|
826 | 826 |
if (_tree_set) { |
827 | 827 |
delete _tree_set_index; |
828 | 828 |
delete _tree_set; |
829 | 829 |
} |
830 | 830 |
if (_delta1) { |
831 | 831 |
delete _delta1_index; |
832 | 832 |
delete _delta1; |
833 | 833 |
} |
834 | 834 |
if (_delta2) { |
835 | 835 |
delete _delta2_index; |
836 | 836 |
delete _delta2; |
837 | 837 |
} |
838 | 838 |
if (_delta3) { |
839 | 839 |
delete _delta3_index; |
840 | 840 |
delete _delta3; |
841 | 841 |
} |
842 | 842 |
if (_delta4) { |
843 | 843 |
delete _delta4_index; |
844 | 844 |
delete _delta4; |
845 | 845 |
} |
846 | 846 |
} |
847 | 847 |
|
848 | 848 |
void matchedToEven(int blossom, int tree) { |
849 | 849 |
if (_delta2->state(blossom) == _delta2->IN_HEAP) { |
850 | 850 |
_delta2->erase(blossom); |
851 | 851 |
} |
852 | 852 |
|
853 | 853 |
if (!_blossom_set->trivial(blossom)) { |
854 | 854 |
(*_blossom_data)[blossom].pot -= |
855 | 855 |
2 * (_delta_sum - (*_blossom_data)[blossom].offset); |
856 | 856 |
} |
857 | 857 |
|
858 | 858 |
for (typename BlossomSet::ItemIt n(*_blossom_set, blossom); |
859 | 859 |
n != INVALID; ++n) { |
860 | 860 |
|
861 | 861 |
_blossom_set->increase(n, std::numeric_limits<Value>::max()); |
862 | 862 |
int ni = (*_node_index)[n]; |
863 | 863 |
|
864 | 864 |
(*_node_data)[ni].heap.clear(); |
865 | 865 |
(*_node_data)[ni].heap_index.clear(); |
866 | 866 |
|
867 | 867 |
(*_node_data)[ni].pot += _delta_sum - (*_blossom_data)[blossom].offset; |
868 | 868 |
|
869 | 869 |
_delta1->push(n, (*_node_data)[ni].pot); |
870 | 870 |
|
871 | 871 |
for (InArcIt e(_graph, n); e != INVALID; ++e) { |
872 | 872 |
Node v = _graph.source(e); |
873 | 873 |
int vb = _blossom_set->find(v); |
874 | 874 |
int vi = (*_node_index)[v]; |
875 | 875 |
|
876 | 876 |
Value rw = (*_node_data)[ni].pot + (*_node_data)[vi].pot - |
877 | 877 |
dualScale * _weight[e]; |
878 | 878 |
|
879 | 879 |
if ((*_blossom_data)[vb].status == EVEN) { |
880 | 880 |
if (_delta3->state(e) != _delta3->IN_HEAP && blossom != vb) { |
881 | 881 |
_delta3->push(e, rw / 2); |
882 | 882 |
} |
883 | 883 |
} else if ((*_blossom_data)[vb].status == UNMATCHED) { |
884 | 884 |
if (_delta3->state(e) != _delta3->IN_HEAP) { |
885 | 885 |
_delta3->push(e, rw); |
886 | 886 |
} |
887 | 887 |
} else { |
888 | 888 |
typename std::map<int, Arc>::iterator it = |
889 | 889 |
(*_node_data)[vi].heap_index.find(tree); |
890 | 890 |
|
891 | 891 |
if (it != (*_node_data)[vi].heap_index.end()) { |
892 | 892 |
if ((*_node_data)[vi].heap[it->second] > rw) { |
893 | 893 |
(*_node_data)[vi].heap.replace(it->second, e); |
894 | 894 |
(*_node_data)[vi].heap.decrease(e, rw); |
895 | 895 |
it->second = e; |
896 | 896 |
} |
897 | 897 |
} else { |
898 | 898 |
(*_node_data)[vi].heap.push(e, rw); |
899 | 899 |
(*_node_data)[vi].heap_index.insert(std::make_pair(tree, e)); |
900 | 900 |
} |
901 | 901 |
|
902 | 902 |
if ((*_blossom_set)[v] > (*_node_data)[vi].heap.prio()) { |
903 | 903 |
_blossom_set->decrease(v, (*_node_data)[vi].heap.prio()); |
904 | 904 |
|
905 | 905 |
if ((*_blossom_data)[vb].status == MATCHED) { |
906 | 906 |
if (_delta2->state(vb) != _delta2->IN_HEAP) { |
907 | 907 |
_delta2->push(vb, _blossom_set->classPrio(vb) - |
908 | 908 |
(*_blossom_data)[vb].offset); |
909 | 909 |
} else if ((*_delta2)[vb] > _blossom_set->classPrio(vb) - |
910 | 910 |
(*_blossom_data)[vb].offset){ |
911 | 911 |
_delta2->decrease(vb, _blossom_set->classPrio(vb) - |
912 | 912 |
(*_blossom_data)[vb].offset); |
913 | 913 |
} |
914 | 914 |
} |
915 | 915 |
} |
916 | 916 |
} |
917 | 917 |
} |
918 | 918 |
} |
919 | 919 |
(*_blossom_data)[blossom].offset = 0; |
920 | 920 |
} |
921 | 921 |
|
922 | 922 |
void matchedToOdd(int blossom) { |
923 | 923 |
if (_delta2->state(blossom) == _delta2->IN_HEAP) { |
924 | 924 |
_delta2->erase(blossom); |
925 | 925 |
} |
926 | 926 |
(*_blossom_data)[blossom].offset += _delta_sum; |
927 | 927 |
if (!_blossom_set->trivial(blossom)) { |
928 | 928 |
_delta4->push(blossom, (*_blossom_data)[blossom].pot / 2 + |
929 | 929 |
(*_blossom_data)[blossom].offset); |
930 | 930 |
} |
931 | 931 |
} |
932 | 932 |
|
933 | 933 |
void evenToMatched(int blossom, int tree) { |
934 | 934 |
if (!_blossom_set->trivial(blossom)) { |
935 | 935 |
(*_blossom_data)[blossom].pot += 2 * _delta_sum; |
936 | 936 |
} |
937 | 937 |
|
938 | 938 |
for (typename BlossomSet::ItemIt n(*_blossom_set, blossom); |
939 | 939 |
n != INVALID; ++n) { |
940 | 940 |
int ni = (*_node_index)[n]; |
941 | 941 |
(*_node_data)[ni].pot -= _delta_sum; |
942 | 942 |
|
943 | 943 |
_delta1->erase(n); |
944 | 944 |
|
945 | 945 |
for (InArcIt e(_graph, n); e != INVALID; ++e) { |
946 | 946 |
Node v = _graph.source(e); |
947 | 947 |
int vb = _blossom_set->find(v); |
948 | 948 |
int vi = (*_node_index)[v]; |
949 | 949 |
|
950 | 950 |
Value rw = (*_node_data)[ni].pot + (*_node_data)[vi].pot - |
951 | 951 |
dualScale * _weight[e]; |
952 | 952 |
|
953 | 953 |
if (vb == blossom) { |
954 | 954 |
if (_delta3->state(e) == _delta3->IN_HEAP) { |
955 | 955 |
_delta3->erase(e); |
956 | 956 |
} |
957 | 957 |
} else if ((*_blossom_data)[vb].status == EVEN) { |
958 | 958 |
|
959 | 959 |
if (_delta3->state(e) == _delta3->IN_HEAP) { |
960 | 960 |
_delta3->erase(e); |
961 | 961 |
} |
962 | 962 |
|
963 | 963 |
int vt = _tree_set->find(vb); |
964 | 964 |
|
965 | 965 |
if (vt != tree) { |
966 | 966 |
|
967 | 967 |
Arc r = _graph.oppositeArc(e); |
968 | 968 |
|
969 | 969 |
typename std::map<int, Arc>::iterator it = |
970 | 970 |
(*_node_data)[ni].heap_index.find(vt); |
971 | 971 |
|
972 | 972 |
if (it != (*_node_data)[ni].heap_index.end()) { |
973 | 973 |
if ((*_node_data)[ni].heap[it->second] > rw) { |
974 | 974 |
(*_node_data)[ni].heap.replace(it->second, r); |
975 | 975 |
(*_node_data)[ni].heap.decrease(r, rw); |
976 | 976 |
it->second = r; |
977 | 977 |
} |
978 | 978 |
} else { |
979 | 979 |
(*_node_data)[ni].heap.push(r, rw); |
980 | 980 |
(*_node_data)[ni].heap_index.insert(std::make_pair(vt, r)); |
981 | 981 |
} |
982 | 982 |
|
983 | 983 |
if ((*_blossom_set)[n] > (*_node_data)[ni].heap.prio()) { |
984 | 984 |
_blossom_set->decrease(n, (*_node_data)[ni].heap.prio()); |
985 | 985 |
|
986 | 986 |
if (_delta2->state(blossom) != _delta2->IN_HEAP) { |
987 | 987 |
_delta2->push(blossom, _blossom_set->classPrio(blossom) - |
988 | 988 |
(*_blossom_data)[blossom].offset); |
989 | 989 |
} else if ((*_delta2)[blossom] > |
990 | 990 |
_blossom_set->classPrio(blossom) - |
991 | 991 |
(*_blossom_data)[blossom].offset){ |
992 | 992 |
_delta2->decrease(blossom, _blossom_set->classPrio(blossom) - |
993 | 993 |
(*_blossom_data)[blossom].offset); |
994 | 994 |
} |
995 | 995 |
} |
996 | 996 |
} |
997 | 997 |
|
998 | 998 |
} else if ((*_blossom_data)[vb].status == UNMATCHED) { |
999 | 999 |
if (_delta3->state(e) == _delta3->IN_HEAP) { |
1000 | 1000 |
_delta3->erase(e); |
1001 | 1001 |
} |
1002 | 1002 |
} else { |
1003 | 1003 |
|
1004 | 1004 |
typename std::map<int, Arc>::iterator it = |
1005 | 1005 |
(*_node_data)[vi].heap_index.find(tree); |
1006 | 1006 |
|
1007 | 1007 |
if (it != (*_node_data)[vi].heap_index.end()) { |
1008 | 1008 |
(*_node_data)[vi].heap.erase(it->second); |
1009 | 1009 |
(*_node_data)[vi].heap_index.erase(it); |
1010 | 1010 |
if ((*_node_data)[vi].heap.empty()) { |
1011 | 1011 |
_blossom_set->increase(v, std::numeric_limits<Value>::max()); |
1012 | 1012 |
} else if ((*_blossom_set)[v] < (*_node_data)[vi].heap.prio()) { |
1013 | 1013 |
_blossom_set->increase(v, (*_node_data)[vi].heap.prio()); |
1014 | 1014 |
} |
1015 | 1015 |
|
1016 | 1016 |
if ((*_blossom_data)[vb].status == MATCHED) { |
1017 | 1017 |
if (_blossom_set->classPrio(vb) == |
1018 | 1018 |
std::numeric_limits<Value>::max()) { |
1019 | 1019 |
_delta2->erase(vb); |
1020 | 1020 |
} else if ((*_delta2)[vb] < _blossom_set->classPrio(vb) - |
1021 | 1021 |
(*_blossom_data)[vb].offset) { |
1022 | 1022 |
_delta2->increase(vb, _blossom_set->classPrio(vb) - |
1023 | 1023 |
(*_blossom_data)[vb].offset); |
1024 | 1024 |
} |
1025 | 1025 |
} |
1026 | 1026 |
} |
1027 | 1027 |
} |
1028 | 1028 |
} |
1029 | 1029 |
} |
1030 | 1030 |
} |
1031 | 1031 |
|
1032 | 1032 |
void oddToMatched(int blossom) { |
1033 | 1033 |
(*_blossom_data)[blossom].offset -= _delta_sum; |
1034 | 1034 |
|
1035 | 1035 |
if (_blossom_set->classPrio(blossom) != |
1036 | 1036 |
std::numeric_limits<Value>::max()) { |
1037 | 1037 |
_delta2->push(blossom, _blossom_set->classPrio(blossom) - |
1038 | 1038 |
(*_blossom_data)[blossom].offset); |
1039 | 1039 |
} |
1040 | 1040 |
|
1041 | 1041 |
if (!_blossom_set->trivial(blossom)) { |
1042 | 1042 |
_delta4->erase(blossom); |
1043 | 1043 |
} |
1044 | 1044 |
} |
1045 | 1045 |
|
1046 | 1046 |
void oddToEven(int blossom, int tree) { |
1047 | 1047 |
if (!_blossom_set->trivial(blossom)) { |
1048 | 1048 |
_delta4->erase(blossom); |
1049 | 1049 |
(*_blossom_data)[blossom].pot -= |
1050 | 1050 |
2 * (2 * _delta_sum - (*_blossom_data)[blossom].offset); |
1051 | 1051 |
} |
1052 | 1052 |
|
1053 | 1053 |
for (typename BlossomSet::ItemIt n(*_blossom_set, blossom); |
1054 | 1054 |
n != INVALID; ++n) { |
1055 | 1055 |
int ni = (*_node_index)[n]; |
1056 | 1056 |
|
1057 | 1057 |
_blossom_set->increase(n, std::numeric_limits<Value>::max()); |
1058 | 1058 |
|
1059 | 1059 |
(*_node_data)[ni].heap.clear(); |
1060 | 1060 |
(*_node_data)[ni].heap_index.clear(); |
1061 | 1061 |
(*_node_data)[ni].pot += |
1062 | 1062 |
2 * _delta_sum - (*_blossom_data)[blossom].offset; |
1063 | 1063 |
|
1064 | 1064 |
_delta1->push(n, (*_node_data)[ni].pot); |
1065 | 1065 |
|
1066 | 1066 |
for (InArcIt e(_graph, n); e != INVALID; ++e) { |
1067 | 1067 |
Node v = _graph.source(e); |
1068 | 1068 |
int vb = _blossom_set->find(v); |
1069 | 1069 |
int vi = (*_node_index)[v]; |
1070 | 1070 |
|
1071 | 1071 |
Value rw = (*_node_data)[ni].pot + (*_node_data)[vi].pot - |
1072 | 1072 |
dualScale * _weight[e]; |
1073 | 1073 |
|
1074 | 1074 |
if ((*_blossom_data)[vb].status == EVEN) { |
1075 | 1075 |
if (_delta3->state(e) != _delta3->IN_HEAP && blossom != vb) { |
1076 | 1076 |
_delta3->push(e, rw / 2); |
1077 | 1077 |
} |
1078 | 1078 |
} else if ((*_blossom_data)[vb].status == UNMATCHED) { |
1079 | 1079 |
if (_delta3->state(e) != _delta3->IN_HEAP) { |
1080 | 1080 |
_delta3->push(e, rw); |
1081 | 1081 |
} |
1082 | 1082 |
} else { |
1083 | 1083 |
|
1084 | 1084 |
typename std::map<int, Arc>::iterator it = |
1085 | 1085 |
(*_node_data)[vi].heap_index.find(tree); |
1086 | 1086 |
|
1087 | 1087 |
if (it != (*_node_data)[vi].heap_index.end()) { |
1088 | 1088 |
if ((*_node_data)[vi].heap[it->second] > rw) { |
1089 | 1089 |
(*_node_data)[vi].heap.replace(it->second, e); |
1090 | 1090 |
(*_node_data)[vi].heap.decrease(e, rw); |
1091 | 1091 |
it->second = e; |
1092 | 1092 |
} |
1093 | 1093 |
} else { |
1094 | 1094 |
(*_node_data)[vi].heap.push(e, rw); |
1095 | 1095 |
(*_node_data)[vi].heap_index.insert(std::make_pair(tree, e)); |
1096 | 1096 |
} |
1097 | 1097 |
|
1098 | 1098 |
if ((*_blossom_set)[v] > (*_node_data)[vi].heap.prio()) { |
1099 | 1099 |
_blossom_set->decrease(v, (*_node_data)[vi].heap.prio()); |
1100 | 1100 |
|
1101 | 1101 |
if ((*_blossom_data)[vb].status == MATCHED) { |
1102 | 1102 |
if (_delta2->state(vb) != _delta2->IN_HEAP) { |
1103 | 1103 |
_delta2->push(vb, _blossom_set->classPrio(vb) - |
1104 | 1104 |
(*_blossom_data)[vb].offset); |
1105 | 1105 |
} else if ((*_delta2)[vb] > _blossom_set->classPrio(vb) - |
1106 | 1106 |
(*_blossom_data)[vb].offset) { |
1107 | 1107 |
_delta2->decrease(vb, _blossom_set->classPrio(vb) - |
1108 | 1108 |
(*_blossom_data)[vb].offset); |
1109 | 1109 |
} |
1110 | 1110 |
} |
1111 | 1111 |
} |
1112 | 1112 |
} |
1113 | 1113 |
} |
1114 | 1114 |
} |
1115 | 1115 |
(*_blossom_data)[blossom].offset = 0; |
1116 | 1116 |
} |
1117 | 1117 |
|
1118 | 1118 |
|
1119 | 1119 |
void matchedToUnmatched(int blossom) { |
1120 | 1120 |
if (_delta2->state(blossom) == _delta2->IN_HEAP) { |
1121 | 1121 |
_delta2->erase(blossom); |
1122 | 1122 |
} |
1123 | 1123 |
|
1124 | 1124 |
for (typename BlossomSet::ItemIt n(*_blossom_set, blossom); |
1125 | 1125 |
n != INVALID; ++n) { |
1126 | 1126 |
int ni = (*_node_index)[n]; |
1127 | 1127 |
|
1128 | 1128 |
_blossom_set->increase(n, std::numeric_limits<Value>::max()); |
1129 | 1129 |
|
1130 | 1130 |
(*_node_data)[ni].heap.clear(); |
1131 | 1131 |
(*_node_data)[ni].heap_index.clear(); |
1132 | 1132 |
|
1133 | 1133 |
for (OutArcIt e(_graph, n); e != INVALID; ++e) { |
1134 | 1134 |
Node v = _graph.target(e); |
1135 | 1135 |
int vb = _blossom_set->find(v); |
1136 | 1136 |
int vi = (*_node_index)[v]; |
1137 | 1137 |
|
1138 | 1138 |
Value rw = (*_node_data)[ni].pot + (*_node_data)[vi].pot - |
1139 | 1139 |
dualScale * _weight[e]; |
1140 | 1140 |
|
1141 | 1141 |
if ((*_blossom_data)[vb].status == EVEN) { |
1142 | 1142 |
if (_delta3->state(e) != _delta3->IN_HEAP) { |
1143 | 1143 |
_delta3->push(e, rw); |
1144 | 1144 |
} |
1145 | 1145 |
} |
1146 | 1146 |
} |
1147 | 1147 |
} |
1148 | 1148 |
} |
1149 | 1149 |
|
1150 | 1150 |
void unmatchedToMatched(int blossom) { |
1151 | 1151 |
for (typename BlossomSet::ItemIt n(*_blossom_set, blossom); |
1152 | 1152 |
n != INVALID; ++n) { |
1153 | 1153 |
int ni = (*_node_index)[n]; |
1154 | 1154 |
|
1155 | 1155 |
for (InArcIt e(_graph, n); e != INVALID; ++e) { |
1156 | 1156 |
Node v = _graph.source(e); |
1157 | 1157 |
int vb = _blossom_set->find(v); |
1158 | 1158 |
int vi = (*_node_index)[v]; |
1159 | 1159 |
|
1160 | 1160 |
Value rw = (*_node_data)[ni].pot + (*_node_data)[vi].pot - |
1161 | 1161 |
dualScale * _weight[e]; |
1162 | 1162 |
|
1163 | 1163 |
if (vb == blossom) { |
1164 | 1164 |
if (_delta3->state(e) == _delta3->IN_HEAP) { |
1165 | 1165 |
_delta3->erase(e); |
1166 | 1166 |
} |
1167 | 1167 |
} else if ((*_blossom_data)[vb].status == EVEN) { |
1168 | 1168 |
|
1169 | 1169 |
if (_delta3->state(e) == _delta3->IN_HEAP) { |
1170 | 1170 |
_delta3->erase(e); |
1171 | 1171 |
} |
1172 | 1172 |
|
1173 | 1173 |
int vt = _tree_set->find(vb); |
1174 | 1174 |
|
1175 | 1175 |
Arc r = _graph.oppositeArc(e); |
1176 | 1176 |
|
1177 | 1177 |
typename std::map<int, Arc>::iterator it = |
1178 | 1178 |
(*_node_data)[ni].heap_index.find(vt); |
1179 | 1179 |
|
1180 | 1180 |
if (it != (*_node_data)[ni].heap_index.end()) { |
1181 | 1181 |
if ((*_node_data)[ni].heap[it->second] > rw) { |
1182 | 1182 |
(*_node_data)[ni].heap.replace(it->second, r); |
1183 | 1183 |
(*_node_data)[ni].heap.decrease(r, rw); |
1184 | 1184 |
it->second = r; |
1185 | 1185 |
} |
1186 | 1186 |
} else { |
1187 | 1187 |
(*_node_data)[ni].heap.push(r, rw); |
1188 | 1188 |
(*_node_data)[ni].heap_index.insert(std::make_pair(vt, r)); |
1189 | 1189 |
} |
1190 | 1190 |
|
1191 | 1191 |
if ((*_blossom_set)[n] > (*_node_data)[ni].heap.prio()) { |
1192 | 1192 |
_blossom_set->decrease(n, (*_node_data)[ni].heap.prio()); |
1193 | 1193 |
|
1194 | 1194 |
if (_delta2->state(blossom) != _delta2->IN_HEAP) { |
1195 | 1195 |
_delta2->push(blossom, _blossom_set->classPrio(blossom) - |
1196 | 1196 |
(*_blossom_data)[blossom].offset); |
1197 | 1197 |
} else if ((*_delta2)[blossom] > _blossom_set->classPrio(blossom)- |
1198 | 1198 |
(*_blossom_data)[blossom].offset){ |
1199 | 1199 |
_delta2->decrease(blossom, _blossom_set->classPrio(blossom) - |
1200 | 1200 |
(*_blossom_data)[blossom].offset); |
1201 | 1201 |
} |
1202 | 1202 |
} |
1203 | 1203 |
|
1204 | 1204 |
} else if ((*_blossom_data)[vb].status == UNMATCHED) { |
1205 | 1205 |
if (_delta3->state(e) == _delta3->IN_HEAP) { |
1206 | 1206 |
_delta3->erase(e); |
1207 | 1207 |
} |
1208 | 1208 |
} |
1209 | 1209 |
} |
1210 | 1210 |
} |
1211 | 1211 |
} |
1212 | 1212 |
|
1213 | 1213 |
void alternatePath(int even, int tree) { |
1214 | 1214 |
int odd; |
1215 | 1215 |
|
1216 | 1216 |
evenToMatched(even, tree); |
1217 | 1217 |
(*_blossom_data)[even].status = MATCHED; |
1218 | 1218 |
|
1219 | 1219 |
while ((*_blossom_data)[even].pred != INVALID) { |
1220 | 1220 |
odd = _blossom_set->find(_graph.target((*_blossom_data)[even].pred)); |
1221 | 1221 |
(*_blossom_data)[odd].status = MATCHED; |
1222 | 1222 |
oddToMatched(odd); |
1223 | 1223 |
(*_blossom_data)[odd].next = (*_blossom_data)[odd].pred; |
1224 | 1224 |
|
1225 | 1225 |
even = _blossom_set->find(_graph.target((*_blossom_data)[odd].pred)); |
1226 | 1226 |
(*_blossom_data)[even].status = MATCHED; |
1227 | 1227 |
evenToMatched(even, tree); |
1228 | 1228 |
(*_blossom_data)[even].next = |
1229 | 1229 |
_graph.oppositeArc((*_blossom_data)[odd].pred); |
1230 | 1230 |
} |
1231 | 1231 |
|
1232 | 1232 |
} |
1233 | 1233 |
|
1234 | 1234 |
void destroyTree(int tree) { |
1235 | 1235 |
for (TreeSet::ItemIt b(*_tree_set, tree); b != INVALID; ++b) { |
1236 | 1236 |
if ((*_blossom_data)[b].status == EVEN) { |
1237 | 1237 |
(*_blossom_data)[b].status = MATCHED; |
1238 | 1238 |
evenToMatched(b, tree); |
1239 | 1239 |
} else if ((*_blossom_data)[b].status == ODD) { |
1240 | 1240 |
(*_blossom_data)[b].status = MATCHED; |
1241 | 1241 |
oddToMatched(b); |
1242 | 1242 |
} |
1243 | 1243 |
} |
1244 | 1244 |
_tree_set->eraseClass(tree); |
1245 | 1245 |
} |
1246 | 1246 |
|
1247 | 1247 |
|
1248 | 1248 |
void unmatchNode(const Node& node) { |
1249 | 1249 |
int blossom = _blossom_set->find(node); |
1250 | 1250 |
int tree = _tree_set->find(blossom); |
1251 | 1251 |
|
1252 | 1252 |
alternatePath(blossom, tree); |
1253 | 1253 |
destroyTree(tree); |
1254 | 1254 |
|
1255 | 1255 |
(*_blossom_data)[blossom].status = UNMATCHED; |
1256 | 1256 |
(*_blossom_data)[blossom].base = node; |
1257 | 1257 |
matchedToUnmatched(blossom); |
1258 | 1258 |
} |
1259 | 1259 |
|
1260 | 1260 |
|
1261 | 1261 |
void augmentOnEdge(const Edge& edge) { |
1262 | 1262 |
|
1263 | 1263 |
int left = _blossom_set->find(_graph.u(edge)); |
1264 | 1264 |
int right = _blossom_set->find(_graph.v(edge)); |
1265 | 1265 |
|
1266 | 1266 |
if ((*_blossom_data)[left].status == EVEN) { |
1267 | 1267 |
int left_tree = _tree_set->find(left); |
1268 | 1268 |
alternatePath(left, left_tree); |
1269 | 1269 |
destroyTree(left_tree); |
1270 | 1270 |
} else { |
1271 | 1271 |
(*_blossom_data)[left].status = MATCHED; |
1272 | 1272 |
unmatchedToMatched(left); |
1273 | 1273 |
} |
1274 | 1274 |
|
1275 | 1275 |
if ((*_blossom_data)[right].status == EVEN) { |
1276 | 1276 |
int right_tree = _tree_set->find(right); |
1277 | 1277 |
alternatePath(right, right_tree); |
1278 | 1278 |
destroyTree(right_tree); |
1279 | 1279 |
} else { |
1280 | 1280 |
(*_blossom_data)[right].status = MATCHED; |
1281 | 1281 |
unmatchedToMatched(right); |
1282 | 1282 |
} |
1283 | 1283 |
|
1284 | 1284 |
(*_blossom_data)[left].next = _graph.direct(edge, true); |
1285 | 1285 |
(*_blossom_data)[right].next = _graph.direct(edge, false); |
1286 | 1286 |
} |
1287 | 1287 |
|
1288 | 1288 |
void extendOnArc(const Arc& arc) { |
1289 | 1289 |
int base = _blossom_set->find(_graph.target(arc)); |
1290 | 1290 |
int tree = _tree_set->find(base); |
1291 | 1291 |
|
1292 | 1292 |
int odd = _blossom_set->find(_graph.source(arc)); |
1293 | 1293 |
_tree_set->insert(odd, tree); |
1294 | 1294 |
(*_blossom_data)[odd].status = ODD; |
1295 | 1295 |
matchedToOdd(odd); |
1296 | 1296 |
(*_blossom_data)[odd].pred = arc; |
1297 | 1297 |
|
1298 | 1298 |
int even = _blossom_set->find(_graph.target((*_blossom_data)[odd].next)); |
1299 | 1299 |
(*_blossom_data)[even].pred = (*_blossom_data)[even].next; |
1300 | 1300 |
_tree_set->insert(even, tree); |
1301 | 1301 |
(*_blossom_data)[even].status = EVEN; |
1302 | 1302 |
matchedToEven(even, tree); |
1303 | 1303 |
} |
1304 | 1304 |
|
1305 | 1305 |
void shrinkOnEdge(const Edge& edge, int tree) { |
1306 | 1306 |
int nca = -1; |
1307 | 1307 |
std::vector<int> left_path, right_path; |
1308 | 1308 |
|
1309 | 1309 |
{ |
1310 | 1310 |
std::set<int> left_set, right_set; |
1311 | 1311 |
int left = _blossom_set->find(_graph.u(edge)); |
1312 | 1312 |
left_path.push_back(left); |
1313 | 1313 |
left_set.insert(left); |
1314 | 1314 |
|
1315 | 1315 |
int right = _blossom_set->find(_graph.v(edge)); |
1316 | 1316 |
right_path.push_back(right); |
1317 | 1317 |
right_set.insert(right); |
1318 | 1318 |
|
1319 | 1319 |
while (true) { |
1320 | 1320 |
|
1321 | 1321 |
if ((*_blossom_data)[left].pred == INVALID) break; |
1322 | 1322 |
|
1323 | 1323 |
left = |
1324 | 1324 |
_blossom_set->find(_graph.target((*_blossom_data)[left].pred)); |
1325 | 1325 |
left_path.push_back(left); |
1326 | 1326 |
left = |
1327 | 1327 |
_blossom_set->find(_graph.target((*_blossom_data)[left].pred)); |
1328 | 1328 |
left_path.push_back(left); |
1329 | 1329 |
|
1330 | 1330 |
left_set.insert(left); |
1331 | 1331 |
|
1332 | 1332 |
if (right_set.find(left) != right_set.end()) { |
1333 | 1333 |
nca = left; |
1334 | 1334 |
break; |
1335 | 1335 |
} |
1336 | 1336 |
|
1337 | 1337 |
if ((*_blossom_data)[right].pred == INVALID) break; |
1338 | 1338 |
|
1339 | 1339 |
right = |
1340 | 1340 |
_blossom_set->find(_graph.target((*_blossom_data)[right].pred)); |
1341 | 1341 |
right_path.push_back(right); |
1342 | 1342 |
right = |
1343 | 1343 |
_blossom_set->find(_graph.target((*_blossom_data)[right].pred)); |
1344 | 1344 |
right_path.push_back(right); |
1345 | 1345 |
|
1346 | 1346 |
right_set.insert(right); |
1347 | 1347 |
|
1348 | 1348 |
if (left_set.find(right) != left_set.end()) { |
1349 | 1349 |
nca = right; |
1350 | 1350 |
break; |
1351 | 1351 |
} |
1352 | 1352 |
|
1353 | 1353 |
} |
1354 | 1354 |
|
1355 | 1355 |
if (nca == -1) { |
1356 | 1356 |
if ((*_blossom_data)[left].pred == INVALID) { |
1357 | 1357 |
nca = right; |
1358 | 1358 |
while (left_set.find(nca) == left_set.end()) { |
1359 | 1359 |
nca = |
1360 | 1360 |
_blossom_set->find(_graph.target((*_blossom_data)[nca].pred)); |
1361 | 1361 |
right_path.push_back(nca); |
1362 | 1362 |
nca = |
1363 | 1363 |
_blossom_set->find(_graph.target((*_blossom_data)[nca].pred)); |
1364 | 1364 |
right_path.push_back(nca); |
1365 | 1365 |
} |
1366 | 1366 |
} else { |
1367 | 1367 |
nca = left; |
1368 | 1368 |
while (right_set.find(nca) == right_set.end()) { |
1369 | 1369 |
nca = |
1370 | 1370 |
_blossom_set->find(_graph.target((*_blossom_data)[nca].pred)); |
1371 | 1371 |
left_path.push_back(nca); |
1372 | 1372 |
nca = |
1373 | 1373 |
_blossom_set->find(_graph.target((*_blossom_data)[nca].pred)); |
1374 | 1374 |
left_path.push_back(nca); |
1375 | 1375 |
} |
1376 | 1376 |
} |
1377 | 1377 |
} |
1378 | 1378 |
} |
1379 | 1379 |
|
1380 | 1380 |
std::vector<int> subblossoms; |
1381 | 1381 |
Arc prev; |
1382 | 1382 |
|
1383 | 1383 |
prev = _graph.direct(edge, true); |
1384 | 1384 |
for (int i = 0; left_path[i] != nca; i += 2) { |
1385 | 1385 |
subblossoms.push_back(left_path[i]); |
1386 | 1386 |
(*_blossom_data)[left_path[i]].next = prev; |
1387 | 1387 |
_tree_set->erase(left_path[i]); |
1388 | 1388 |
|
1389 | 1389 |
subblossoms.push_back(left_path[i + 1]); |
1390 | 1390 |
(*_blossom_data)[left_path[i + 1]].status = EVEN; |
1391 | 1391 |
oddToEven(left_path[i + 1], tree); |
1392 | 1392 |
_tree_set->erase(left_path[i + 1]); |
1393 | 1393 |
prev = _graph.oppositeArc((*_blossom_data)[left_path[i + 1]].pred); |
1394 | 1394 |
} |
1395 | 1395 |
|
1396 | 1396 |
int k = 0; |
1397 | 1397 |
while (right_path[k] != nca) ++k; |
1398 | 1398 |
|
1399 | 1399 |
subblossoms.push_back(nca); |
1400 | 1400 |
(*_blossom_data)[nca].next = prev; |
1401 | 1401 |
|
1402 | 1402 |
for (int i = k - 2; i >= 0; i -= 2) { |
1403 | 1403 |
subblossoms.push_back(right_path[i + 1]); |
1404 | 1404 |
(*_blossom_data)[right_path[i + 1]].status = EVEN; |
1405 | 1405 |
oddToEven(right_path[i + 1], tree); |
1406 | 1406 |
_tree_set->erase(right_path[i + 1]); |
1407 | 1407 |
|
1408 | 1408 |
(*_blossom_data)[right_path[i + 1]].next = |
1409 | 1409 |
(*_blossom_data)[right_path[i + 1]].pred; |
1410 | 1410 |
|
1411 | 1411 |
subblossoms.push_back(right_path[i]); |
1412 | 1412 |
_tree_set->erase(right_path[i]); |
1413 | 1413 |
} |
1414 | 1414 |
|
1415 | 1415 |
int surface = |
1416 | 1416 |
_blossom_set->join(subblossoms.begin(), subblossoms.end()); |
1417 | 1417 |
|
1418 | 1418 |
for (int i = 0; i < int(subblossoms.size()); ++i) { |
1419 | 1419 |
if (!_blossom_set->trivial(subblossoms[i])) { |
1420 | 1420 |
(*_blossom_data)[subblossoms[i]].pot += 2 * _delta_sum; |
1421 | 1421 |
} |
1422 | 1422 |
(*_blossom_data)[subblossoms[i]].status = MATCHED; |
1423 | 1423 |
} |
1424 | 1424 |
|
1425 | 1425 |
(*_blossom_data)[surface].pot = -2 * _delta_sum; |
1426 | 1426 |
(*_blossom_data)[surface].offset = 0; |
1427 | 1427 |
(*_blossom_data)[surface].status = EVEN; |
1428 | 1428 |
(*_blossom_data)[surface].pred = (*_blossom_data)[nca].pred; |
1429 | 1429 |
(*_blossom_data)[surface].next = (*_blossom_data)[nca].pred; |
1430 | 1430 |
|
1431 | 1431 |
_tree_set->insert(surface, tree); |
1432 | 1432 |
_tree_set->erase(nca); |
1433 | 1433 |
} |
1434 | 1434 |
|
1435 | 1435 |
void splitBlossom(int blossom) { |
1436 | 1436 |
Arc next = (*_blossom_data)[blossom].next; |
1437 | 1437 |
Arc pred = (*_blossom_data)[blossom].pred; |
1438 | 1438 |
|
1439 | 1439 |
int tree = _tree_set->find(blossom); |
1440 | 1440 |
|
1441 | 1441 |
(*_blossom_data)[blossom].status = MATCHED; |
1442 | 1442 |
oddToMatched(blossom); |
1443 | 1443 |
if (_delta2->state(blossom) == _delta2->IN_HEAP) { |
1444 | 1444 |
_delta2->erase(blossom); |
1445 | 1445 |
} |
1446 | 1446 |
|
1447 | 1447 |
std::vector<int> subblossoms; |
1448 | 1448 |
_blossom_set->split(blossom, std::back_inserter(subblossoms)); |
1449 | 1449 |
|
1450 | 1450 |
Value offset = (*_blossom_data)[blossom].offset; |
1451 | 1451 |
int b = _blossom_set->find(_graph.source(pred)); |
1452 | 1452 |
int d = _blossom_set->find(_graph.source(next)); |
1453 | 1453 |
|
1454 | 1454 |
int ib = -1, id = -1; |
1455 | 1455 |
for (int i = 0; i < int(subblossoms.size()); ++i) { |
1456 | 1456 |
if (subblossoms[i] == b) ib = i; |
1457 | 1457 |
if (subblossoms[i] == d) id = i; |
1458 | 1458 |
|
1459 | 1459 |
(*_blossom_data)[subblossoms[i]].offset = offset; |
1460 | 1460 |
if (!_blossom_set->trivial(subblossoms[i])) { |
1461 | 1461 |
(*_blossom_data)[subblossoms[i]].pot -= 2 * offset; |
1462 | 1462 |
} |
1463 | 1463 |
if (_blossom_set->classPrio(subblossoms[i]) != |
1464 | 1464 |
std::numeric_limits<Value>::max()) { |
1465 | 1465 |
_delta2->push(subblossoms[i], |
1466 | 1466 |
_blossom_set->classPrio(subblossoms[i]) - |
1467 | 1467 |
(*_blossom_data)[subblossoms[i]].offset); |
1468 | 1468 |
} |
1469 | 1469 |
} |
1470 | 1470 |
|
1471 | 1471 |
if (id > ib ? ((id - ib) % 2 == 0) : ((ib - id) % 2 == 1)) { |
1472 | 1472 |
for (int i = (id + 1) % subblossoms.size(); |
1473 | 1473 |
i != ib; i = (i + 2) % subblossoms.size()) { |
1474 | 1474 |
int sb = subblossoms[i]; |
1475 | 1475 |
int tb = subblossoms[(i + 1) % subblossoms.size()]; |
1476 | 1476 |
(*_blossom_data)[sb].next = |
1477 | 1477 |
_graph.oppositeArc((*_blossom_data)[tb].next); |
1478 | 1478 |
} |
1479 | 1479 |
|
1480 | 1480 |
for (int i = ib; i != id; i = (i + 2) % subblossoms.size()) { |
1481 | 1481 |
int sb = subblossoms[i]; |
1482 | 1482 |
int tb = subblossoms[(i + 1) % subblossoms.size()]; |
1483 | 1483 |
int ub = subblossoms[(i + 2) % subblossoms.size()]; |
1484 | 1484 |
|
1485 | 1485 |
(*_blossom_data)[sb].status = ODD; |
1486 | 1486 |
matchedToOdd(sb); |
1487 | 1487 |
_tree_set->insert(sb, tree); |
1488 | 1488 |
(*_blossom_data)[sb].pred = pred; |
1489 | 1489 |
(*_blossom_data)[sb].next = |
1490 | 1490 |
_graph.oppositeArc((*_blossom_data)[tb].next); |
1491 | 1491 |
|
1492 | 1492 |
pred = (*_blossom_data)[ub].next; |
1493 | 1493 |
|
1494 | 1494 |
(*_blossom_data)[tb].status = EVEN; |
1495 | 1495 |
matchedToEven(tb, tree); |
1496 | 1496 |
_tree_set->insert(tb, tree); |
1497 | 1497 |
(*_blossom_data)[tb].pred = (*_blossom_data)[tb].next; |
1498 | 1498 |
} |
1499 | 1499 |
|
1500 | 1500 |
(*_blossom_data)[subblossoms[id]].status = ODD; |
1501 | 1501 |
matchedToOdd(subblossoms[id]); |
1502 | 1502 |
_tree_set->insert(subblossoms[id], tree); |
1503 | 1503 |
(*_blossom_data)[subblossoms[id]].next = next; |
1504 | 1504 |
(*_blossom_data)[subblossoms[id]].pred = pred; |
1505 | 1505 |
|
1506 | 1506 |
} else { |
1507 | 1507 |
|
1508 | 1508 |
for (int i = (ib + 1) % subblossoms.size(); |
1509 | 1509 |
i != id; i = (i + 2) % subblossoms.size()) { |
1510 | 1510 |
int sb = subblossoms[i]; |
1511 | 1511 |
int tb = subblossoms[(i + 1) % subblossoms.size()]; |
1512 | 1512 |
(*_blossom_data)[sb].next = |
1513 | 1513 |
_graph.oppositeArc((*_blossom_data)[tb].next); |
1514 | 1514 |
} |
1515 | 1515 |
|
1516 | 1516 |
for (int i = id; i != ib; i = (i + 2) % subblossoms.size()) { |
1517 | 1517 |
int sb = subblossoms[i]; |
1518 | 1518 |
int tb = subblossoms[(i + 1) % subblossoms.size()]; |
1519 | 1519 |
int ub = subblossoms[(i + 2) % subblossoms.size()]; |
1520 | 1520 |
|
1521 | 1521 |
(*_blossom_data)[sb].status = ODD; |
1522 | 1522 |
matchedToOdd(sb); |
1523 | 1523 |
_tree_set->insert(sb, tree); |
1524 | 1524 |
(*_blossom_data)[sb].next = next; |
1525 | 1525 |
(*_blossom_data)[sb].pred = |
1526 | 1526 |
_graph.oppositeArc((*_blossom_data)[tb].next); |
1527 | 1527 |
|
1528 | 1528 |
(*_blossom_data)[tb].status = EVEN; |
1529 | 1529 |
matchedToEven(tb, tree); |
1530 | 1530 |
_tree_set->insert(tb, tree); |
1531 | 1531 |
(*_blossom_data)[tb].pred = |
1532 | 1532 |
(*_blossom_data)[tb].next = |
1533 | 1533 |
_graph.oppositeArc((*_blossom_data)[ub].next); |
1534 | 1534 |
next = (*_blossom_data)[ub].next; |
1535 | 1535 |
} |
1536 | 1536 |
|
1537 | 1537 |
(*_blossom_data)[subblossoms[ib]].status = ODD; |
1538 | 1538 |
matchedToOdd(subblossoms[ib]); |
1539 | 1539 |
_tree_set->insert(subblossoms[ib], tree); |
1540 | 1540 |
(*_blossom_data)[subblossoms[ib]].next = next; |
1541 | 1541 |
(*_blossom_data)[subblossoms[ib]].pred = pred; |
1542 | 1542 |
} |
1543 | 1543 |
_tree_set->erase(blossom); |
1544 | 1544 |
} |
1545 | 1545 |
|
1546 | 1546 |
void extractBlossom(int blossom, const Node& base, const Arc& matching) { |
1547 | 1547 |
if (_blossom_set->trivial(blossom)) { |
1548 | 1548 |
int bi = (*_node_index)[base]; |
1549 | 1549 |
Value pot = (*_node_data)[bi].pot; |
1550 | 1550 |
|
1551 |
_matching->set(base, matching); |
|
1551 |
(*_matching)[base] = matching; |
|
1552 | 1552 |
_blossom_node_list.push_back(base); |
1553 |
_node_potential->set(base, pot); |
|
1553 |
(*_node_potential)[base] = pot; |
|
1554 | 1554 |
} else { |
1555 | 1555 |
|
1556 | 1556 |
Value pot = (*_blossom_data)[blossom].pot; |
1557 | 1557 |
int bn = _blossom_node_list.size(); |
1558 | 1558 |
|
1559 | 1559 |
std::vector<int> subblossoms; |
1560 | 1560 |
_blossom_set->split(blossom, std::back_inserter(subblossoms)); |
1561 | 1561 |
int b = _blossom_set->find(base); |
1562 | 1562 |
int ib = -1; |
1563 | 1563 |
for (int i = 0; i < int(subblossoms.size()); ++i) { |
1564 | 1564 |
if (subblossoms[i] == b) { ib = i; break; } |
1565 | 1565 |
} |
1566 | 1566 |
|
1567 | 1567 |
for (int i = 1; i < int(subblossoms.size()); i += 2) { |
1568 | 1568 |
int sb = subblossoms[(ib + i) % subblossoms.size()]; |
1569 | 1569 |
int tb = subblossoms[(ib + i + 1) % subblossoms.size()]; |
1570 | 1570 |
|
1571 | 1571 |
Arc m = (*_blossom_data)[tb].next; |
1572 | 1572 |
extractBlossom(sb, _graph.target(m), _graph.oppositeArc(m)); |
1573 | 1573 |
extractBlossom(tb, _graph.source(m), m); |
1574 | 1574 |
} |
1575 | 1575 |
extractBlossom(subblossoms[ib], base, matching); |
1576 | 1576 |
|
1577 | 1577 |
int en = _blossom_node_list.size(); |
1578 | 1578 |
|
1579 | 1579 |
_blossom_potential.push_back(BlossomVariable(bn, en, pot)); |
1580 | 1580 |
} |
1581 | 1581 |
} |
1582 | 1582 |
|
1583 | 1583 |
void extractMatching() { |
1584 | 1584 |
std::vector<int> blossoms; |
1585 | 1585 |
for (typename BlossomSet::ClassIt c(*_blossom_set); c != INVALID; ++c) { |
1586 | 1586 |
blossoms.push_back(c); |
1587 | 1587 |
} |
1588 | 1588 |
|
1589 | 1589 |
for (int i = 0; i < int(blossoms.size()); ++i) { |
1590 | 1590 |
if ((*_blossom_data)[blossoms[i]].status == MATCHED) { |
1591 | 1591 |
|
1592 | 1592 |
Value offset = (*_blossom_data)[blossoms[i]].offset; |
1593 | 1593 |
(*_blossom_data)[blossoms[i]].pot += 2 * offset; |
1594 | 1594 |
for (typename BlossomSet::ItemIt n(*_blossom_set, blossoms[i]); |
1595 | 1595 |
n != INVALID; ++n) { |
1596 | 1596 |
(*_node_data)[(*_node_index)[n]].pot -= offset; |
1597 | 1597 |
} |
1598 | 1598 |
|
1599 | 1599 |
Arc matching = (*_blossom_data)[blossoms[i]].next; |
1600 | 1600 |
Node base = _graph.source(matching); |
1601 | 1601 |
extractBlossom(blossoms[i], base, matching); |
1602 | 1602 |
} else { |
1603 | 1603 |
Node base = (*_blossom_data)[blossoms[i]].base; |
1604 | 1604 |
extractBlossom(blossoms[i], base, INVALID); |
1605 | 1605 |
} |
1606 | 1606 |
} |
1607 | 1607 |
} |
1608 | 1608 |
|
1609 | 1609 |
public: |
1610 | 1610 |
|
1611 | 1611 |
/// \brief Constructor |
1612 | 1612 |
/// |
1613 | 1613 |
/// Constructor. |
1614 | 1614 |
MaxWeightedMatching(const Graph& graph, const WeightMap& weight) |
1615 | 1615 |
: _graph(graph), _weight(weight), _matching(0), |
1616 | 1616 |
_node_potential(0), _blossom_potential(), _blossom_node_list(), |
1617 | 1617 |
_node_num(0), _blossom_num(0), |
1618 | 1618 |
|
1619 | 1619 |
_blossom_index(0), _blossom_set(0), _blossom_data(0), |
1620 | 1620 |
_node_index(0), _node_heap_index(0), _node_data(0), |
1621 | 1621 |
_tree_set_index(0), _tree_set(0), |
1622 | 1622 |
|
1623 | 1623 |
_delta1_index(0), _delta1(0), |
1624 | 1624 |
_delta2_index(0), _delta2(0), |
1625 | 1625 |
_delta3_index(0), _delta3(0), |
1626 | 1626 |
_delta4_index(0), _delta4(0), |
1627 | 1627 |
|
1628 | 1628 |
_delta_sum() {} |
1629 | 1629 |
|
1630 | 1630 |
~MaxWeightedMatching() { |
1631 | 1631 |
destroyStructures(); |
1632 | 1632 |
} |
1633 | 1633 |
|
1634 | 1634 |
/// \name Execution control |
1635 | 1635 |
/// The simplest way to execute the algorithm is to use the |
1636 | 1636 |
/// \c run() member function. |
1637 | 1637 |
|
1638 | 1638 |
///@{ |
1639 | 1639 |
|
1640 | 1640 |
/// \brief Initialize the algorithm |
1641 | 1641 |
/// |
1642 | 1642 |
/// Initialize the algorithm |
1643 | 1643 |
void init() { |
1644 | 1644 |
createStructures(); |
1645 | 1645 |
|
1646 | 1646 |
for (ArcIt e(_graph); e != INVALID; ++e) { |
1647 |
_node_heap_index->set(e, BinHeap<Value, IntArcMap>::PRE_HEAP); |
|
1647 |
(*_node_heap_index)[e] = BinHeap<Value, IntArcMap>::PRE_HEAP; |
|
1648 | 1648 |
} |
1649 | 1649 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
1650 |
_delta1_index->set(n, _delta1->PRE_HEAP); |
|
1650 |
(*_delta1_index)[n] = _delta1->PRE_HEAP; |
|
1651 | 1651 |
} |
1652 | 1652 |
for (EdgeIt e(_graph); e != INVALID; ++e) { |
1653 |
_delta3_index->set(e, _delta3->PRE_HEAP); |
|
1653 |
(*_delta3_index)[e] = _delta3->PRE_HEAP; |
|
1654 | 1654 |
} |
1655 | 1655 |
for (int i = 0; i < _blossom_num; ++i) { |
1656 |
_delta2_index->set(i, _delta2->PRE_HEAP); |
|
1657 |
_delta4_index->set(i, _delta4->PRE_HEAP); |
|
1656 |
(*_delta2_index)[i] = _delta2->PRE_HEAP; |
|
1657 |
(*_delta4_index)[i] = _delta4->PRE_HEAP; |
|
1658 | 1658 |
} |
1659 | 1659 |
|
1660 | 1660 |
int index = 0; |
1661 | 1661 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
1662 | 1662 |
Value max = 0; |
1663 | 1663 |
for (OutArcIt e(_graph, n); e != INVALID; ++e) { |
1664 | 1664 |
if (_graph.target(e) == n) continue; |
1665 | 1665 |
if ((dualScale * _weight[e]) / 2 > max) { |
1666 | 1666 |
max = (dualScale * _weight[e]) / 2; |
1667 | 1667 |
} |
1668 | 1668 |
} |
1669 |
_node_index->set(n, index); |
|
1669 |
(*_node_index)[n] = index; |
|
1670 | 1670 |
(*_node_data)[index].pot = max; |
1671 | 1671 |
_delta1->push(n, max); |
1672 | 1672 |
int blossom = |
1673 | 1673 |
_blossom_set->insert(n, std::numeric_limits<Value>::max()); |
1674 | 1674 |
|
1675 | 1675 |
_tree_set->insert(blossom); |
1676 | 1676 |
|
1677 | 1677 |
(*_blossom_data)[blossom].status = EVEN; |
1678 | 1678 |
(*_blossom_data)[blossom].pred = INVALID; |
1679 | 1679 |
(*_blossom_data)[blossom].next = INVALID; |
1680 | 1680 |
(*_blossom_data)[blossom].pot = 0; |
1681 | 1681 |
(*_blossom_data)[blossom].offset = 0; |
1682 | 1682 |
++index; |
1683 | 1683 |
} |
1684 | 1684 |
for (EdgeIt e(_graph); e != INVALID; ++e) { |
1685 | 1685 |
int si = (*_node_index)[_graph.u(e)]; |
1686 | 1686 |
int ti = (*_node_index)[_graph.v(e)]; |
1687 | 1687 |
if (_graph.u(e) != _graph.v(e)) { |
1688 | 1688 |
_delta3->push(e, ((*_node_data)[si].pot + (*_node_data)[ti].pot - |
1689 | 1689 |
dualScale * _weight[e]) / 2); |
1690 | 1690 |
} |
1691 | 1691 |
} |
1692 | 1692 |
} |
1693 | 1693 |
|
1694 | 1694 |
/// \brief Starts the algorithm |
1695 | 1695 |
/// |
1696 | 1696 |
/// Starts the algorithm |
1697 | 1697 |
void start() { |
1698 | 1698 |
enum OpType { |
1699 | 1699 |
D1, D2, D3, D4 |
1700 | 1700 |
}; |
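// The four event types, roughly in the order they are checked below: |
//   D1 - the node on top of _delta1 is left unmatched (unmatchNode), |
//   D2 - a tight arc reaching a matched blossom extends an |
//        alternating tree (extendOnArc), |
//   D3 - a tight edge between two even blossoms either shrinks a |
//        new blossom (same tree) or augments the matching |
//        (different trees), |
//   D4 - an odd blossom whose dual value runs out is split |
//        (splitBlossom). |
// In each iteration the smallest delta value is selected and the |
// corresponding operation is performed. |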
1701 | 1701 |
|
1702 | 1702 |
int unmatched = _node_num; |
1703 | 1703 |
while (unmatched > 0) { |
1704 | 1704 |
Value d1 = !_delta1->empty() ? |
1705 | 1705 |
_delta1->prio() : std::numeric_limits<Value>::max(); |
1706 | 1706 |
|
1707 | 1707 |
Value d2 = !_delta2->empty() ? |
1708 | 1708 |
_delta2->prio() : std::numeric_limits<Value>::max(); |
1709 | 1709 |
|
1710 | 1710 |
Value d3 = !_delta3->empty() ? |
1711 | 1711 |
_delta3->prio() : std::numeric_limits<Value>::max(); |
1712 | 1712 |
|
1713 | 1713 |
Value d4 = !_delta4->empty() ? |
1714 | 1714 |
_delta4->prio() : std::numeric_limits<Value>::max(); |
1715 | 1715 |
|
1716 | 1716 |
_delta_sum = d1; OpType ot = D1; |
1717 | 1717 |
if (d2 < _delta_sum) { _delta_sum = d2; ot = D2; } |
1718 | 1718 |
if (d3 < _delta_sum) { _delta_sum = d3; ot = D3; } |
1719 | 1719 |
if (d4 < _delta_sum) { _delta_sum = d4; ot = D4; } |
1720 | 1720 |
|
1721 | 1721 |
|
1722 | 1722 |
switch (ot) { |
1723 | 1723 |
case D1: |
1724 | 1724 |
{ |
1725 | 1725 |
Node n = _delta1->top(); |
1726 | 1726 |
unmatchNode(n); |
1727 | 1727 |
--unmatched; |
1728 | 1728 |
} |
1729 | 1729 |
break; |
1730 | 1730 |
case D2: |
1731 | 1731 |
{ |
1732 | 1732 |
int blossom = _delta2->top(); |
1733 | 1733 |
Node n = _blossom_set->classTop(blossom); |
1734 | 1734 |
Arc e = (*_node_data)[(*_node_index)[n]].heap.top(); |
1735 | 1735 |
extendOnArc(e); |
1736 | 1736 |
} |
1737 | 1737 |
break; |
1738 | 1738 |
case D3: |
1739 | 1739 |
{ |
1740 | 1740 |
Edge e = _delta3->top(); |
1741 | 1741 |
|
1742 | 1742 |
int left_blossom = _blossom_set->find(_graph.u(e)); |
1743 | 1743 |
int right_blossom = _blossom_set->find(_graph.v(e)); |
1744 | 1744 |
|
1745 | 1745 |
if (left_blossom == right_blossom) { |
1746 | 1746 |
_delta3->pop(); |
1747 | 1747 |
} else { |
1748 | 1748 |
int left_tree; |
1749 | 1749 |
if ((*_blossom_data)[left_blossom].status == EVEN) { |
1750 | 1750 |
left_tree = _tree_set->find(left_blossom); |
1751 | 1751 |
} else { |
1752 | 1752 |
left_tree = -1; |
1753 | 1753 |
++unmatched; |
1754 | 1754 |
} |
1755 | 1755 |
int right_tree; |
1756 | 1756 |
if ((*_blossom_data)[right_blossom].status == EVEN) { |
1757 | 1757 |
right_tree = _tree_set->find(right_blossom); |
1758 | 1758 |
} else { |
1759 | 1759 |
right_tree = -1; |
1760 | 1760 |
++unmatched; |
1761 | 1761 |
} |
1762 | 1762 |
|
1763 | 1763 |
if (left_tree == right_tree) { |
1764 | 1764 |
shrinkOnEdge(e, left_tree); |
1765 | 1765 |
} else { |
1766 | 1766 |
augmentOnEdge(e); |
1767 | 1767 |
unmatched -= 2; |
1768 | 1768 |
} |
1769 | 1769 |
} |
1770 | 1770 |
} break; |
1771 | 1771 |
case D4: |
1772 | 1772 |
splitBlossom(_delta4->top()); |
1773 | 1773 |
break; |
1774 | 1774 |
} |
1775 | 1775 |
} |
1776 | 1776 |
extractMatching(); |
1777 | 1777 |
} |
1778 | 1778 |
|
1779 | 1779 |
/// \brief Runs %MaxWeightedMatching algorithm. |
1780 | 1780 |
/// |
1781 | 1781 |
/// This method runs the %MaxWeightedMatching algorithm. |
1782 | 1782 |
/// |
1783 | 1783 |
/// \note mwm.run() is just a shortcut for the following code. |
1784 | 1784 |
/// \code |
1785 | 1785 |
/// mwm.init(); |
1786 | 1786 |
/// mwm.start(); |
1787 | 1787 |
/// \endcode |
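/// |
/// A minimal usage sketch, assuming a \c ListGraph \c graph and an |
/// \c int edge map \c weight are already available: |
/// \code |
/// MaxWeightedMatching<ListGraph> mwm(graph, weight); |
/// mwm.run(); |
/// int value = mwm.matchingValue(); |
/// \endcode |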
1788 | 1788 |
void run() { |
1789 | 1789 |
init(); |
1790 | 1790 |
start(); |
1791 | 1791 |
} |
1792 | 1792 |
|
1793 | 1793 |
/// @} |
1794 | 1794 |
|
1795 | 1795 |
/// \name Primal solution |
1796 | 1796 |
/// Functions to get the primal solution, i.e. the matching. |
1797 | 1797 |
|
1798 | 1798 |
/// @{ |
1799 | 1799 |
|
1800 | 1800 |
/// \brief Returns the weight of the matching. |
1801 | 1801 |
/// |
1802 | 1802 |
/// Returns the weight of the matching. |
1803 | 1803 |
Value matchingValue() const { |
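// Each matching arc is stored at both of its end nodes, so the |
// accumulated weight is divided by two before it is returned. |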
1804 | 1804 |
Value sum = 0; |
1805 | 1805 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
1806 | 1806 |
if ((*_matching)[n] != INVALID) { |
1807 | 1807 |
sum += _weight[(*_matching)[n]]; |
1808 | 1808 |
} |
1809 | 1809 |
} |
1810 | 1810 |
return sum / 2; |
1811 | 1811 |
} |
1812 | 1812 |
|
1813 | 1813 |
/// \brief Returns the cardinality of the matching. |
1814 | 1814 |
/// |
1815 | 1815 |
/// Returns the cardinality of the matching. |
1816 | 1816 |
int matchingSize() const { |
1817 | 1817 |
int num = 0; |
1818 | 1818 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
1819 | 1819 |
if ((*_matching)[n] != INVALID) { |
1820 | 1820 |
++num; |
1821 | 1821 |
} |
1822 | 1822 |
} |
1823 | 1823 |
return num / 2; |
1824 | 1824 |
} |
1825 | 1825 |
|
1826 | 1826 |
/// \brief Returns true when the edge is in the matching. |
1827 | 1827 |
/// |
1828 | 1828 |
/// Returns true when the edge is in the matching. |
1829 | 1829 |
bool matching(const Edge& edge) const { |
1830 | 1830 |
return edge == (*_matching)[_graph.u(edge)]; |
1831 | 1831 |
} |
1832 | 1832 |
|
1833 | 1833 |
/// \brief Returns the incident matching arc. |
1834 | 1834 |
/// |
1835 | 1835 |
/// Returns the incident matching arc of the given node. If the |
1836 | 1836 |
/// node is not matched then it gives back \c INVALID. |
1837 | 1837 |
Arc matching(const Node& node) const { |
1838 | 1838 |
return (*_matching)[node]; |
1839 | 1839 |
} |
1840 | 1840 |
|
1841 | 1841 |
/// \brief Returns the mate of the node. |
1842 | 1842 |
/// |
1843 | 1843 |
/// Returns the adjacent node in a matching arc. If the node is |
1844 | 1844 |
/// not matched then it gives back \c INVALID. |
1845 | 1845 |
Node mate(const Node& node) const { |
1846 | 1846 |
return (*_matching)[node] != INVALID ? |
1847 | 1847 |
_graph.target((*_matching)[node]) : INVALID; |
1848 | 1848 |
} |
1849 | 1849 |
|
1850 | 1850 |
/// @} |
1851 | 1851 |
|
1852 | 1852 |
/// \name Dual solution |
1853 | 1853 |
/// Functions to get the dual solution. |
1854 | 1854 |
|
1855 | 1855 |
/// @{ |
1856 | 1856 |
|
1857 | 1857 |
/// \brief Returns the value of the dual solution. |
1858 | 1858 |
/// |
1859 | 1859 |
/// Returns the value of the dual solution. It should be equal to |
1860 | 1860 |
/// the primal value scaled by \ref dualScale "dual scale". |
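/// |
/// In terms of the stored potentials this equals |
/// \f[ \sum_{u \in V} y_u + \sum_{B} \left\lfloor \frac{\vert B \vert}{2} \right\rfloor z_B, \f] |
/// where \f$y_u\f$ denotes the node potentials and \f$z_B\f$ the |
/// potentials of the blossoms in the basis. |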
1861 | 1861 |
Value dualValue() const { |
1862 | 1862 |
Value sum = 0; |
1863 | 1863 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
1864 | 1864 |
sum += nodeValue(n); |
1865 | 1865 |
} |
1866 | 1866 |
for (int i = 0; i < blossomNum(); ++i) { |
1867 | 1867 |
sum += blossomValue(i) * (blossomSize(i) / 2); |
1868 | 1868 |
} |
1869 | 1869 |
return sum; |
1870 | 1870 |
} |
1871 | 1871 |
|
1872 | 1872 |
/// \brief Returns the value of the node. |
1873 | 1873 |
/// |
1874 | 1874 |
/// Returns the value of the node. |
1875 | 1875 |
Value nodeValue(const Node& n) const { |
1876 | 1876 |
return (*_node_potential)[n]; |
1877 | 1877 |
} |
1878 | 1878 |
|
1879 | 1879 |
/// \brief Returns the number of blossoms in the basis. |
1880 | 1880 |
/// |
1881 | 1881 |
/// Returns the number of blossoms in the basis. |
1882 | 1882 |
/// \see BlossomIt |
1883 | 1883 |
int blossomNum() const { |
1884 | 1884 |
return _blossom_potential.size(); |
1885 | 1885 |
} |
1886 | 1886 |
|
1887 | 1887 |
|
1888 | 1888 |
/// \brief Returns the number of nodes in the blossom. |
1889 | 1889 |
/// |
1890 | 1890 |
/// Returns the number of nodes in the blossom. |
1891 | 1891 |
int blossomSize(int k) const { |
1892 | 1892 |
return _blossom_potential[k].end - _blossom_potential[k].begin; |
1893 | 1893 |
} |
1894 | 1894 |
|
1895 | 1895 |
/// \brief Returns the value of the blossom. |
1896 | 1896 |
/// |
1897 | 1897 |
/// Returns the value of the blossom. |
1898 | 1898 |
/// \see BlossomIt |
1899 | 1899 |
Value blossomValue(int k) const { |
1900 | 1900 |
return _blossom_potential[k].value; |
1901 | 1901 |
} |
1902 | 1902 |
|
1903 | 1903 |
/// \brief Iterator for obtaining the nodes of the blossom. |
1904 | 1904 |
/// |
1905 | 1905 |
/// Iterator for obtaining the nodes of the blossom. This class |
1906 | 1906 |
/// provides a common LEMON-style iterator for listing a |
1907 | 1907 |
/// subset of the nodes. |
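/// |
/// A typical use, assuming \c mwm is a \c MaxWeightedMatching<ListGraph> |
/// that has already been run on \c graph (a sketch only): |
/// \code |
/// for (int i = 0; i < mwm.blossomNum(); ++i) { |
///   for (MaxWeightedMatching<ListGraph>::BlossomIt n(mwm, i); |
///        n != INVALID; ++n) { |
///     std::cout << graph.id(n) << ' '; |
///   } |
///   std::cout << std::endl; |
/// } |
/// \endcode |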
1908 | 1908 |
class BlossomIt { |
1909 | 1909 |
public: |
1910 | 1910 |
|
1911 | 1911 |
/// \brief Constructor. |
1912 | 1912 |
/// |
1913 | 1913 |
/// Constructor to get the nodes of the variable. |
1914 | 1914 |
BlossomIt(const MaxWeightedMatching& algorithm, int variable) |
1915 | 1915 |
: _algorithm(&algorithm) |
1916 | 1916 |
{ |
1917 | 1917 |
_index = _algorithm->_blossom_potential[variable].begin; |
1918 | 1918 |
_last = _algorithm->_blossom_potential[variable].end; |
1919 | 1919 |
} |
1920 | 1920 |
|
1921 | 1921 |
/// \brief Conversion to node. |
1922 | 1922 |
/// |
1923 | 1923 |
/// Conversion to node. |
1924 | 1924 |
operator Node() const { |
1925 | 1925 |
return _algorithm->_blossom_node_list[_index]; |
1926 | 1926 |
} |
1927 | 1927 |
|
1928 | 1928 |
/// \brief Increment operator. |
1929 | 1929 |
/// |
1930 | 1930 |
/// Increment operator. |
1931 | 1931 |
BlossomIt& operator++() { |
1932 | 1932 |
++_index; |
1933 | 1933 |
return *this; |
1934 | 1934 |
} |
1935 | 1935 |
|
1936 | 1936 |
/// \brief Validity checking |
1937 | 1937 |
/// |
1938 | 1938 |
/// Checks whether the iterator is invalid. |
1939 | 1939 |
bool operator==(Invalid) const { return _index == _last; } |
1940 | 1940 |
|
1941 | 1941 |
/// \brief Validity checking |
1942 | 1942 |
/// |
1943 | 1943 |
/// Checks whether the iterator is valid. |
1944 | 1944 |
bool operator!=(Invalid) const { return _index != _last; } |
1945 | 1945 |
|
1946 | 1946 |
private: |
1947 | 1947 |
const MaxWeightedMatching* _algorithm; |
1948 | 1948 |
int _last; |
1949 | 1949 |
int _index; |
1950 | 1950 |
}; |
1951 | 1951 |
|
1952 | 1952 |
/// @} |
1953 | 1953 |
|
1954 | 1954 |
}; |
1955 | 1955 |
|
1956 | 1956 |
/// \ingroup matching |
1957 | 1957 |
/// |
1958 | 1958 |
/// \brief Weighted perfect matching in general graphs |
1959 | 1959 |
/// |
1960 | 1960 |
/// This class provides an efficient implementation of Edmonds' |
1961 | 1961 |
/// maximum weighted perfect matching algorithm. The implementation |
1962 | 1962 |
/// is based on extensive use of priority queues and provides |
1963 | 1963 |
/// \f$O(nm\log n)\f$ time complexity. |
1964 | 1964 |
/// |
1965 | 1965 |
/// The maximum weighted perfect matching problem is to find undirected |
1966 | 1966 |
/// edges in the graph with maximum overall weight such that no two of |
1967 | 1967 |
/// them share their end nodes and all nodes are covered. The problem can be |
1968 | 1968 |
/// formulated with the following linear program. |
1969 | 1969 |
/// \f[ \sum_{e \in \delta(u)}x_e = 1 \quad \forall u\in V\f] |
1970 | 1970 |
/** \f[ \sum_{e \in \gamma(B)}x_e \le \frac{\vert B \vert - 1}{2} |
1971 | 1971 |
\quad \forall B\in\mathcal{O}\f] */ |
1972 | 1972 |
/// \f[x_e \ge 0\quad \forall e\in E\f] |
1973 | 1973 |
/// \f[\max \sum_{e\in E}x_ew_e\f] |
1974 | 1974 |
/// where \f$\delta(X)\f$ is the set of edges incident to a node in |
1975 | 1975 |
/// \f$X\f$, \f$\gamma(X)\f$ is the set of edges with both ends in |
1976 | 1976 |
/// \f$X\f$ and \f$\mathcal{O}\f$ is the set of odd cardinality |
1977 | 1977 |
/// subsets of the nodes. |
1978 | 1978 |
/// |
1979 | 1979 |
/// The algorithm calculates an optimal matching and a proof of the |
1980 | 1980 |
/// optimality. The solution of the dual problem can be used to check |
1981 | 1981 |
/// the result of the algorithm. The dual linear problem is the |
1982 | 1982 |
/** \f[ y_u + y_v + \sum_{B \in \mathcal{O}, uv \in \gamma(B)}z_B \ge |
1983 | 1983 |
w_{uv} \quad \forall uv\in E\f] */ |
1984 | 1984 |
/// \f[z_B \ge 0 \quad \forall B \in \mathcal{O}\f] |
1985 | 1985 |
/** \f[\min \sum_{u \in V}y_u + \sum_{B \in \mathcal{O}} |
1986 | 1986 |
\frac{\vert B \vert - 1}{2}z_B\f] */ |
1987 | 1987 |
/// |
1988 | 1988 |
/// The algorithm can be executed with \c run() or the \c init() and |
1989 | 1989 |
/// then the \c start() member functions. Afterwards the matching can |
1990 | 1990 |
/// be queried with the \c matching() or \c mate() functions. The dual |
1991 | 1991 |
/// solution can be obtained with the \c nodeValue(), \c blossomNum() and \c |
1992 | 1992 |
/// blossomValue() members and \ref MaxWeightedMatching::BlossomIt |
1993 | 1993 |
/// "BlossomIt" nested class which is able to iterate on the nodes |
1994 | 1994 |
/// of a blossom. If the value type is integral then the dual |
1995 | 1995 |
/// solution is multiplied by \ref MaxWeightedMatching::dualScale "4". |
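/// |
/// A minimal usage sketch, assuming a \c ListGraph \c graph with an |
/// \c int edge map \c weight that admits a perfect matching: |
/// \code |
/// MaxWeightedPerfectMatching<ListGraph> mwpm(graph, weight); |
/// mwpm.run(); |
/// for (ListGraph::NodeIt n(graph); n != INVALID; ++n) { |
///   std::cout << graph.id(n) << " -- " |
///             << graph.id(mwpm.mate(n)) << std::endl; |
/// } |
/// \endcode |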
1996 | 1996 |
template <typename GR, |
1997 | 1997 |
typename WM = typename GR::template EdgeMap<int> > |
1998 | 1998 |
class MaxWeightedPerfectMatching { |
1999 | 1999 |
public: |
2000 | 2000 |
|
2001 | 2001 |
typedef GR Graph; |
2002 | 2002 |
typedef WM WeightMap; |
2003 | 2003 |
typedef typename WeightMap::Value Value; |
2004 | 2004 |
|
2005 | 2005 |
/// \brief Scaling factor for the dual solution |
2006 | 2006 |
/// |
2007 | 2007 |
/// Scaling factor for the dual solution. It is equal to 4 or 1, |
2008 | 2008 |
/// depending on the value type. |
2009 | 2009 |
static const int dualScale = |
2010 | 2010 |
std::numeric_limits<Value>::is_integer ? 4 : 1; |
2011 | 2011 |
|
2012 | 2012 |
typedef typename Graph::template NodeMap<typename Graph::Arc> |
2013 | 2013 |
MatchingMap; |
2014 | 2014 |
|
2015 | 2015 |
private: |
2016 | 2016 |
|
2017 | 2017 |
TEMPLATE_GRAPH_TYPEDEFS(Graph); |
2018 | 2018 |
|
2019 | 2019 |
typedef typename Graph::template NodeMap<Value> NodePotential; |
2020 | 2020 |
typedef std::vector<Node> BlossomNodeList; |
2021 | 2021 |
|
2022 | 2022 |
struct BlossomVariable { |
2023 | 2023 |
int begin, end; |
2024 | 2024 |
Value value; |
2025 | 2025 |
|
2026 | 2026 |
BlossomVariable(int _begin, int _end, Value _value) |
2027 | 2027 |
: begin(_begin), end(_end), value(_value) {} |
2028 | 2028 |
|
2029 | 2029 |
}; |
2030 | 2030 |
|
2031 | 2031 |
typedef std::vector<BlossomVariable> BlossomPotential; |
2032 | 2032 |
|
2033 | 2033 |
const Graph& _graph; |
2034 | 2034 |
const WeightMap& _weight; |
2035 | 2035 |
|
2036 | 2036 |
MatchingMap* _matching; |
2037 | 2037 |
|
2038 | 2038 |
NodePotential* _node_potential; |
2039 | 2039 |
|
2040 | 2040 |
BlossomPotential _blossom_potential; |
2041 | 2041 |
BlossomNodeList _blossom_node_list; |
2042 | 2042 |
|
2043 | 2043 |
int _node_num; |
2044 | 2044 |
int _blossom_num; |
2045 | 2045 |
|
2046 | 2046 |
typedef RangeMap<int> IntIntMap; |
2047 | 2047 |
|
2048 | 2048 |
enum Status { |
2049 | 2049 |
EVEN = -1, MATCHED = 0, ODD = 1 |
2050 | 2050 |
}; |
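// Note that, unlike in MaxWeightedMatching, there is no UNMATCHED |
// status here: in a perfect matching every node must be covered. |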
2051 | 2051 |
|
2052 | 2052 |
typedef HeapUnionFind<Value, IntNodeMap> BlossomSet; |
2053 | 2053 |
struct BlossomData { |
2054 | 2054 |
int tree; |
2055 | 2055 |
Status status; |
2056 | 2056 |
Arc pred, next; |
2057 | 2057 |
Value pot, offset; |
2058 | 2058 |
}; |
2059 | 2059 |
|
2060 | 2060 |
IntNodeMap *_blossom_index; |
2061 | 2061 |
BlossomSet *_blossom_set; |
2062 | 2062 |
RangeMap<BlossomData>* _blossom_data; |
2063 | 2063 |
|
2064 | 2064 |
IntNodeMap *_node_index; |
2065 | 2065 |
IntArcMap *_node_heap_index; |
2066 | 2066 |
|
2067 | 2067 |
struct NodeData { |
2068 | 2068 |
|
2069 | 2069 |
NodeData(IntArcMap& node_heap_index) |
2070 | 2070 |
: heap(node_heap_index) {} |
2071 | 2071 |
|
2072 | 2072 |
int blossom; |
2073 | 2073 |
Value pot; |
2074 | 2074 |
BinHeap<Value, IntArcMap> heap; |
2075 | 2075 |
std::map<int, Arc> heap_index; |
2076 | 2076 |
|
2077 | 2077 |
int tree; |
2078 | 2078 |
}; |
2079 | 2079 |
|
2080 | 2080 |
RangeMap<NodeData>* _node_data; |
2081 | 2081 |
|
2082 | 2082 |
typedef ExtendFindEnum<IntIntMap> TreeSet; |
2083 | 2083 |
|
2084 | 2084 |
IntIntMap *_tree_set_index; |
2085 | 2085 |
TreeSet *_tree_set; |
2086 | 2086 |
|
2087 | 2087 |
IntIntMap *_delta2_index; |
2088 | 2088 |
BinHeap<Value, IntIntMap> *_delta2; |
2089 | 2089 |
|
2090 | 2090 |
IntEdgeMap *_delta3_index; |
2091 | 2091 |
BinHeap<Value, IntEdgeMap> *_delta3; |
2092 | 2092 |
|
2093 | 2093 |
IntIntMap *_delta4_index; |
2094 | 2094 |
BinHeap<Value, IntIntMap> *_delta4; |
2095 | 2095 |
|
2096 | 2096 |
Value _delta_sum; |
2097 | 2097 |
|
2098 | 2098 |
void createStructures() { |
2099 | 2099 |
_node_num = countNodes(_graph); |
2100 | 2100 |
_blossom_num = _node_num * 3 / 2; |
2101 | 2101 |
|
2102 | 2102 |
if (!_matching) { |
2103 | 2103 |
_matching = new MatchingMap(_graph); |
2104 | 2104 |
} |
2105 | 2105 |
if (!_node_potential) { |
2106 | 2106 |
_node_potential = new NodePotential(_graph); |
2107 | 2107 |
} |
2108 | 2108 |
if (!_blossom_set) { |
2109 | 2109 |
_blossom_index = new IntNodeMap(_graph); |
2110 | 2110 |
_blossom_set = new BlossomSet(*_blossom_index); |
2111 | 2111 |
_blossom_data = new RangeMap<BlossomData>(_blossom_num); |
2112 | 2112 |
} |
2113 | 2113 |
|
2114 | 2114 |
if (!_node_index) { |
2115 | 2115 |
_node_index = new IntNodeMap(_graph); |
2116 | 2116 |
_node_heap_index = new IntArcMap(_graph); |
2117 | 2117 |
_node_data = new RangeMap<NodeData>(_node_num, |
2118 | 2118 |
NodeData(*_node_heap_index)); |
2119 | 2119 |
} |
2120 | 2120 |
|
2121 | 2121 |
if (!_tree_set) { |
2122 | 2122 |
_tree_set_index = new IntIntMap(_blossom_num); |
2123 | 2123 |
_tree_set = new TreeSet(*_tree_set_index); |
2124 | 2124 |
} |
2125 | 2125 |
if (!_delta2) { |
2126 | 2126 |
_delta2_index = new IntIntMap(_blossom_num); |
2127 | 2127 |
_delta2 = new BinHeap<Value, IntIntMap>(*_delta2_index); |
2128 | 2128 |
} |
2129 | 2129 |
if (!_delta3) { |
2130 | 2130 |
_delta3_index = new IntEdgeMap(_graph); |
2131 | 2131 |
_delta3 = new BinHeap<Value, IntEdgeMap>(*_delta3_index); |
2132 | 2132 |
} |
2133 | 2133 |
if (!_delta4) { |
2134 | 2134 |
_delta4_index = new IntIntMap(_blossom_num); |
2135 | 2135 |
_delta4 = new BinHeap<Value, IntIntMap>(*_delta4_index); |
2136 | 2136 |
} |
2137 | 2137 |
} |
2138 | 2138 |
|
2139 | 2139 |
void destroyStructures() { |
2140 | 2140 |
_node_num = countNodes(_graph); |
2141 | 2141 |
_blossom_num = _node_num * 3 / 2; |
2142 | 2142 |
|
2143 | 2143 |
if (_matching) { |
2144 | 2144 |
delete _matching; |
2145 | 2145 |
} |
2146 | 2146 |
if (_node_potential) { |
2147 | 2147 |
delete _node_potential; |
2148 | 2148 |
} |
2149 | 2149 |
if (_blossom_set) { |
2150 | 2150 |
delete _blossom_index; |
2151 | 2151 |
delete _blossom_set; |
2152 | 2152 |
delete _blossom_data; |
2153 | 2153 |
} |
2154 | 2154 |
|
2155 | 2155 |
if (_node_index) { |
2156 | 2156 |
delete _node_index; |
2157 | 2157 |
delete _node_heap_index; |
2158 | 2158 |
delete _node_data; |
2159 | 2159 |
} |
2160 | 2160 |
|
2161 | 2161 |
if (_tree_set) { |
2162 | 2162 |
delete _tree_set_index; |
2163 | 2163 |
delete _tree_set; |
2164 | 2164 |
} |
2165 | 2165 |
if (_delta2) { |
2166 | 2166 |
delete _delta2_index; |
2167 | 2167 |
delete _delta2; |
2168 | 2168 |
} |
2169 | 2169 |
if (_delta3) { |
2170 | 2170 |
delete _delta3_index; |
2171 | 2171 |
delete _delta3; |
2172 | 2172 |
} |
2173 | 2173 |
if (_delta4) { |
2174 | 2174 |
delete _delta4_index; |
2175 | 2175 |
delete _delta4; |
2176 | 2176 |
} |
2177 | 2177 |
} |
2178 | 2178 |
|
2179 | 2179 |
void matchedToEven(int blossom, int tree) { |
2180 | 2180 |
if (_delta2->state(blossom) == _delta2->IN_HEAP) { |
2181 | 2181 |
_delta2->erase(blossom); |
2182 | 2182 |
} |
2183 | 2183 |
|
2184 | 2184 |
if (!_blossom_set->trivial(blossom)) { |
2185 | 2185 |
(*_blossom_data)[blossom].pot -= |
2186 | 2186 |
2 * (_delta_sum - (*_blossom_data)[blossom].offset); |
2187 | 2187 |
} |
2188 | 2188 |
|
2189 | 2189 |
for (typename BlossomSet::ItemIt n(*_blossom_set, blossom); |
2190 | 2190 |
n != INVALID; ++n) { |
2191 | 2191 |
|
2192 | 2192 |
_blossom_set->increase(n, std::numeric_limits<Value>::max()); |
2193 | 2193 |
int ni = (*_node_index)[n]; |
2194 | 2194 |
|
2195 | 2195 |
(*_node_data)[ni].heap.clear(); |
2196 | 2196 |
(*_node_data)[ni].heap_index.clear(); |
2197 | 2197 |
|
2198 | 2198 |
(*_node_data)[ni].pot += _delta_sum - (*_blossom_data)[blossom].offset; |
2199 | 2199 |
|
2200 | 2200 |
for (InArcIt e(_graph, n); e != INVALID; ++e) { |
2201 | 2201 |
Node v = _graph.source(e); |
2202 | 2202 |
int vb = _blossom_set->find(v); |
2203 | 2203 |
int vi = (*_node_index)[v]; |
2204 | 2204 |
|
2205 | 2205 |
Value rw = (*_node_data)[ni].pot + (*_node_data)[vi].pot - |
2206 | 2206 |
dualScale * _weight[e]; |
2207 | 2207 |
|
2208 | 2208 |
if ((*_blossom_data)[vb].status == EVEN) { |
2209 | 2209 |
if (_delta3->state(e) != _delta3->IN_HEAP && blossom != vb) { |
2210 | 2210 |
_delta3->push(e, rw / 2); |
2211 | 2211 |
} |
2212 | 2212 |
} else { |
2213 | 2213 |
typename std::map<int, Arc>::iterator it = |
2214 | 2214 |
(*_node_data)[vi].heap_index.find(tree); |
2215 | 2215 |
|
2216 | 2216 |
if (it != (*_node_data)[vi].heap_index.end()) { |
2217 | 2217 |
if ((*_node_data)[vi].heap[it->second] > rw) { |
2218 | 2218 |
(*_node_data)[vi].heap.replace(it->second, e); |
2219 | 2219 |
(*_node_data)[vi].heap.decrease(e, rw); |
2220 | 2220 |
it->second = e; |
2221 | 2221 |
} |
2222 | 2222 |
} else { |
2223 | 2223 |
(*_node_data)[vi].heap.push(e, rw); |
2224 | 2224 |
(*_node_data)[vi].heap_index.insert(std::make_pair(tree, e)); |
2225 | 2225 |
} |
2226 | 2226 |
|
2227 | 2227 |
if ((*_blossom_set)[v] > (*_node_data)[vi].heap.prio()) { |
2228 | 2228 |
_blossom_set->decrease(v, (*_node_data)[vi].heap.prio()); |
2229 | 2229 |
|
2230 | 2230 |
if ((*_blossom_data)[vb].status == MATCHED) { |
2231 | 2231 |
if (_delta2->state(vb) != _delta2->IN_HEAP) { |
2232 | 2232 |
_delta2->push(vb, _blossom_set->classPrio(vb) - |
2233 | 2233 |
(*_blossom_data)[vb].offset); |
2234 | 2234 |
} else if ((*_delta2)[vb] > _blossom_set->classPrio(vb) - |
2235 | 2235 |
(*_blossom_data)[vb].offset){ |
2236 | 2236 |
_delta2->decrease(vb, _blossom_set->classPrio(vb) - |
2237 | 2237 |
(*_blossom_data)[vb].offset); |
2238 | 2238 |
} |
2239 | 2239 |
} |
2240 | 2240 |
} |
2241 | 2241 |
} |
2242 | 2242 |
} |
2243 | 2243 |
} |
2244 | 2244 |
(*_blossom_data)[blossom].offset = 0; |
2245 | 2245 |
} |
2246 | 2246 |
|
2247 | 2247 |
void matchedToOdd(int blossom) { |
2248 | 2248 |
if (_delta2->state(blossom) == _delta2->IN_HEAP) { |
2249 | 2249 |
_delta2->erase(blossom); |
2250 | 2250 |
} |
2251 | 2251 |
(*_blossom_data)[blossom].offset += _delta_sum; |
2252 | 2252 |
if (!_blossom_set->trivial(blossom)) { |
2253 | 2253 |
_delta4->push(blossom, (*_blossom_data)[blossom].pot / 2 + |
2254 | 2254 |
(*_blossom_data)[blossom].offset); |
2255 | 2255 |
} |
2256 | 2256 |
} |
2257 | 2257 |
|
2258 | 2258 |
void evenToMatched(int blossom, int tree) { |
2259 | 2259 |
if (!_blossom_set->trivial(blossom)) { |
2260 | 2260 |
(*_blossom_data)[blossom].pot += 2 * _delta_sum; |
2261 | 2261 |
} |
2262 | 2262 |
|
2263 | 2263 |
for (typename BlossomSet::ItemIt n(*_blossom_set, blossom); |
2264 | 2264 |
n != INVALID; ++n) { |
2265 | 2265 |
int ni = (*_node_index)[n]; |
2266 | 2266 |
(*_node_data)[ni].pot -= _delta_sum; |
2267 | 2267 |
|
2268 | 2268 |
for (InArcIt e(_graph, n); e != INVALID; ++e) { |
2269 | 2269 |
Node v = _graph.source(e); |
2270 | 2270 |
int vb = _blossom_set->find(v); |
2271 | 2271 |
int vi = (*_node_index)[v]; |
2272 | 2272 |
|
2273 | 2273 |
Value rw = (*_node_data)[ni].pot + (*_node_data)[vi].pot - |
2274 | 2274 |
dualScale * _weight[e]; |
2275 | 2275 |
|
2276 | 2276 |
if (vb == blossom) { |
2277 | 2277 |
if (_delta3->state(e) == _delta3->IN_HEAP) { |
2278 | 2278 |
_delta3->erase(e); |
2279 | 2279 |
} |
2280 | 2280 |
} else if ((*_blossom_data)[vb].status == EVEN) { |
2281 | 2281 |
|
2282 | 2282 |
if (_delta3->state(e) == _delta3->IN_HEAP) { |
2283 | 2283 |
_delta3->erase(e); |
2284 | 2284 |
} |
2285 | 2285 |
|
2286 | 2286 |
int vt = _tree_set->find(vb); |
2287 | 2287 |
|
2288 | 2288 |
if (vt != tree) { |
2289 | 2289 |
|
2290 | 2290 |
Arc r = _graph.oppositeArc(e); |
2291 | 2291 |
|
2292 | 2292 |
typename std::map<int, Arc>::iterator it = |
2293 | 2293 |
(*_node_data)[ni].heap_index.find(vt); |
2294 | 2294 |
|
2295 | 2295 |
if (it != (*_node_data)[ni].heap_index.end()) { |
2296 | 2296 |
if ((*_node_data)[ni].heap[it->second] > rw) { |
2297 | 2297 |
(*_node_data)[ni].heap.replace(it->second, r); |
2298 | 2298 |
(*_node_data)[ni].heap.decrease(r, rw); |
2299 | 2299 |
it->second = r; |
2300 | 2300 |
} |
2301 | 2301 |
} else { |
2302 | 2302 |
(*_node_data)[ni].heap.push(r, rw); |
2303 | 2303 |
(*_node_data)[ni].heap_index.insert(std::make_pair(vt, r)); |
2304 | 2304 |
} |
2305 | 2305 |
|
2306 | 2306 |
if ((*_blossom_set)[n] > (*_node_data)[ni].heap.prio()) { |
2307 | 2307 |
_blossom_set->decrease(n, (*_node_data)[ni].heap.prio()); |
2308 | 2308 |
|
2309 | 2309 |
if (_delta2->state(blossom) != _delta2->IN_HEAP) { |
2310 | 2310 |
_delta2->push(blossom, _blossom_set->classPrio(blossom) - |
2311 | 2311 |
(*_blossom_data)[blossom].offset); |
2312 | 2312 |
} else if ((*_delta2)[blossom] > |
2313 | 2313 |
_blossom_set->classPrio(blossom) - |
2314 | 2314 |
(*_blossom_data)[blossom].offset){ |
2315 | 2315 |
_delta2->decrease(blossom, _blossom_set->classPrio(blossom) - |
2316 | 2316 |
(*_blossom_data)[blossom].offset); |
2317 | 2317 |
} |
2318 | 2318 |
} |
2319 | 2319 |
} |
2320 | 2320 |
} else { |
2321 | 2321 |
|
2322 | 2322 |
typename std::map<int, Arc>::iterator it = |
2323 | 2323 |
(*_node_data)[vi].heap_index.find(tree); |
2324 | 2324 |
|
2325 | 2325 |
if (it != (*_node_data)[vi].heap_index.end()) { |
2326 | 2326 |
(*_node_data)[vi].heap.erase(it->second); |
2327 | 2327 |
(*_node_data)[vi].heap_index.erase(it); |
2328 | 2328 |
if ((*_node_data)[vi].heap.empty()) { |
2329 | 2329 |
_blossom_set->increase(v, std::numeric_limits<Value>::max()); |
2330 | 2330 |
} else if ((*_blossom_set)[v] < (*_node_data)[vi].heap.prio()) { |
2331 | 2331 |
_blossom_set->increase(v, (*_node_data)[vi].heap.prio()); |
2332 | 2332 |
} |
2333 | 2333 |
|
2334 | 2334 |
if ((*_blossom_data)[vb].status == MATCHED) { |
2335 | 2335 |
if (_blossom_set->classPrio(vb) == |
2336 | 2336 |
std::numeric_limits<Value>::max()) { |
2337 | 2337 |
_delta2->erase(vb); |
2338 | 2338 |
} else if ((*_delta2)[vb] < _blossom_set->classPrio(vb) - |
2339 | 2339 |
(*_blossom_data)[vb].offset) { |
2340 | 2340 |
_delta2->increase(vb, _blossom_set->classPrio(vb) - |
2341 | 2341 |
(*_blossom_data)[vb].offset); |
2342 | 2342 |
} |
2343 | 2343 |
} |
2344 | 2344 |
} |
2345 | 2345 |
} |
2346 | 2346 |
} |
2347 | 2347 |
} |
2348 | 2348 |
} |
2349 | 2349 |
|
2350 | 2350 |
void oddToMatched(int blossom) { |
2351 | 2351 |
(*_blossom_data)[blossom].offset -= _delta_sum; |
2352 | 2352 |
|
2353 | 2353 |
if (_blossom_set->classPrio(blossom) != |
2354 | 2354 |
std::numeric_limits<Value>::max()) { |
2355 | 2355 |
_delta2->push(blossom, _blossom_set->classPrio(blossom) - |
2356 | 2356 |
(*_blossom_data)[blossom].offset); |
2357 | 2357 |
} |
2358 | 2358 |
|
2359 | 2359 |
if (!_blossom_set->trivial(blossom)) { |
2360 | 2360 |
_delta4->erase(blossom); |
2361 | 2361 |
} |
2362 | 2362 |
} |
2363 | 2363 |
|
2364 | 2364 |
void oddToEven(int blossom, int tree) { |
2365 | 2365 |
if (!_blossom_set->trivial(blossom)) { |
2366 | 2366 |
_delta4->erase(blossom); |
2367 | 2367 |
(*_blossom_data)[blossom].pot -= |
2368 | 2368 |
2 * (2 * _delta_sum - (*_blossom_data)[blossom].offset); |
2369 | 2369 |
} |
2370 | 2370 |
|
2371 | 2371 |
for (typename BlossomSet::ItemIt n(*_blossom_set, blossom); |
2372 | 2372 |
n != INVALID; ++n) { |
2373 | 2373 |
int ni = (*_node_index)[n]; |
2374 | 2374 |
|
2375 | 2375 |
_blossom_set->increase(n, std::numeric_limits<Value>::max()); |
2376 | 2376 |
|
2377 | 2377 |
(*_node_data)[ni].heap.clear(); |
2378 | 2378 |
(*_node_data)[ni].heap_index.clear(); |
2379 | 2379 |
(*_node_data)[ni].pot += |
2380 | 2380 |
2 * _delta_sum - (*_blossom_data)[blossom].offset; |
2381 | 2381 |
|
2382 | 2382 |
for (InArcIt e(_graph, n); e != INVALID; ++e) { |
2383 | 2383 |
Node v = _graph.source(e); |
2384 | 2384 |
int vb = _blossom_set->find(v); |
2385 | 2385 |
int vi = (*_node_index)[v]; |
2386 | 2386 |
|
2387 | 2387 |
Value rw = (*_node_data)[ni].pot + (*_node_data)[vi].pot - |
2388 | 2388 |
dualScale * _weight[e]; |
2389 | 2389 |
|
2390 | 2390 |
if ((*_blossom_data)[vb].status == EVEN) { |
2391 | 2391 |
if (_delta3->state(e) != _delta3->IN_HEAP && blossom != vb) { |
2392 | 2392 |
_delta3->push(e, rw / 2); |
2393 | 2393 |
} |
2394 | 2394 |
} else { |
2395 | 2395 |
|
2396 | 2396 |
typename std::map<int, Arc>::iterator it = |
2397 | 2397 |
(*_node_data)[vi].heap_index.find(tree); |
2398 | 2398 |
|
2399 | 2399 |
if (it != (*_node_data)[vi].heap_index.end()) { |
2400 | 2400 |
if ((*_node_data)[vi].heap[it->second] > rw) { |
2401 | 2401 |
(*_node_data)[vi].heap.replace(it->second, e); |
2402 | 2402 |
(*_node_data)[vi].heap.decrease(e, rw); |
2403 | 2403 |
it->second = e; |
2404 | 2404 |
} |
2405 | 2405 |
} else { |
2406 | 2406 |
(*_node_data)[vi].heap.push(e, rw); |
2407 | 2407 |
(*_node_data)[vi].heap_index.insert(std::make_pair(tree, e)); |
2408 | 2408 |
} |
2409 | 2409 |
|
2410 | 2410 |
if ((*_blossom_set)[v] > (*_node_data)[vi].heap.prio()) { |
2411 | 2411 |
_blossom_set->decrease(v, (*_node_data)[vi].heap.prio()); |
2412 | 2412 |
|
2413 | 2413 |
if ((*_blossom_data)[vb].status == MATCHED) { |
2414 | 2414 |
if (_delta2->state(vb) != _delta2->IN_HEAP) { |
2415 | 2415 |
_delta2->push(vb, _blossom_set->classPrio(vb) - |
2416 | 2416 |
(*_blossom_data)[vb].offset); |
2417 | 2417 |
} else if ((*_delta2)[vb] > _blossom_set->classPrio(vb) - |
2418 | 2418 |
(*_blossom_data)[vb].offset) { |
2419 | 2419 |
_delta2->decrease(vb, _blossom_set->classPrio(vb) - |
2420 | 2420 |
(*_blossom_data)[vb].offset); |
2421 | 2421 |
} |
2422 | 2422 |
} |
2423 | 2423 |
} |
2424 | 2424 |
} |
2425 | 2425 |
} |
2426 | 2426 |
} |
2427 | 2427 |
(*_blossom_data)[blossom].offset = 0; |
2428 | 2428 |
} |
2429 | 2429 |
|
2430 | 2430 |
void alternatePath(int even, int tree) { |
2431 | 2431 |
int odd; |
2432 | 2432 |
|
2433 | 2433 |
evenToMatched(even, tree); |
2434 | 2434 |
(*_blossom_data)[even].status = MATCHED; |
2435 | 2435 |
|
2436 | 2436 |
while ((*_blossom_data)[even].pred != INVALID) { |
2437 | 2437 |
odd = _blossom_set->find(_graph.target((*_blossom_data)[even].pred)); |
2438 | 2438 |
(*_blossom_data)[odd].status = MATCHED; |
2439 | 2439 |
oddToMatched(odd); |
2440 | 2440 |
(*_blossom_data)[odd].next = (*_blossom_data)[odd].pred; |
2441 | 2441 |
|
2442 | 2442 |
even = _blossom_set->find(_graph.target((*_blossom_data)[odd].pred)); |
2443 | 2443 |
(*_blossom_data)[even].status = MATCHED; |
2444 | 2444 |
evenToMatched(even, tree); |
2445 | 2445 |
(*_blossom_data)[even].next = |
2446 | 2446 |
_graph.oppositeArc((*_blossom_data)[odd].pred); |
2447 | 2447 |
} |
2448 | 2448 |
|
2449 | 2449 |
} |
2450 | 2450 |
|
2451 | 2451 |
void destroyTree(int tree) { |
2452 | 2452 |
for (TreeSet::ItemIt b(*_tree_set, tree); b != INVALID; ++b) { |
2453 | 2453 |
if ((*_blossom_data)[b].status == EVEN) { |
2454 | 2454 |
(*_blossom_data)[b].status = MATCHED; |
2455 | 2455 |
evenToMatched(b, tree); |
2456 | 2456 |
} else if ((*_blossom_data)[b].status == ODD) { |
2457 | 2457 |
(*_blossom_data)[b].status = MATCHED; |
2458 | 2458 |
oddToMatched(b); |
2459 | 2459 |
} |
2460 | 2460 |
} |
2461 | 2461 |
_tree_set->eraseClass(tree); |
2462 | 2462 |
} |
2463 | 2463 |
|
2464 | 2464 |
void augmentOnEdge(const Edge& edge) { |
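// In the perfect matching variant there is no UNMATCHED status, so |
// both end blossoms of the tight edge belong to alternating trees |
// here and no unmatched case has to be handled (cf. the augmenting |
// step of MaxWeightedMatching). |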
2465 | 2465 |
|
2466 | 2466 |
int left = _blossom_set->find(_graph.u(edge)); |
2467 | 2467 |
int right = _blossom_set->find(_graph.v(edge)); |
2468 | 2468 |
|
2469 | 2469 |
int left_tree = _tree_set->find(left); |
2470 | 2470 |
alternatePath(left, left_tree); |
2471 | 2471 |
destroyTree(left_tree); |
2472 | 2472 |
|
2473 | 2473 |
int right_tree = _tree_set->find(right); |
2474 | 2474 |
alternatePath(right, right_tree); |
2475 | 2475 |
destroyTree(right_tree); |
2476 | 2476 |
|
2477 | 2477 |
(*_blossom_data)[left].next = _graph.direct(edge, true); |
2478 | 2478 |
(*_blossom_data)[right].next = _graph.direct(edge, false); |
2479 | 2479 |
} |
2480 | 2480 |
|
2481 | 2481 |
void extendOnArc(const Arc& arc) { |
2482 | 2482 |
int base = _blossom_set->find(_graph.target(arc)); |
2483 | 2483 |
int tree = _tree_set->find(base); |
2484 | 2484 |
|
2485 | 2485 |
int odd = _blossom_set->find(_graph.source(arc)); |
2486 | 2486 |
_tree_set->insert(odd, tree); |
2487 | 2487 |
(*_blossom_data)[odd].status = ODD; |
2488 | 2488 |
matchedToOdd(odd); |
2489 | 2489 |
(*_blossom_data)[odd].pred = arc; |
2490 | 2490 |
|
2491 | 2491 |
int even = _blossom_set->find(_graph.target((*_blossom_data)[odd].next)); |
2492 | 2492 |
(*_blossom_data)[even].pred = (*_blossom_data)[even].next; |
2493 | 2493 |
_tree_set->insert(even, tree); |
2494 | 2494 |
(*_blossom_data)[even].status = EVEN; |
2495 | 2495 |
matchedToEven(even, tree); |
2496 | 2496 |
} |
2497 | 2497 |
|
2498 | 2498 |
void shrinkOnEdge(const Edge& edge, int tree) { |
2499 | 2499 |
int nca = -1; |
2500 | 2500 |
std::vector<int> left_path, right_path; |
2501 | 2501 |
|
2502 | 2502 |
{ |
2503 | 2503 |
std::set<int> left_set, right_set; |
2504 | 2504 |
int left = _blossom_set->find(_graph.u(edge)); |
2505 | 2505 |
left_path.push_back(left); |
2506 | 2506 |
left_set.insert(left); |
2507 | 2507 |
|
2508 | 2508 |
int right = _blossom_set->find(_graph.v(edge)); |
2509 | 2509 |
right_path.push_back(right); |
2510 | 2510 |
right_set.insert(right); |
2511 | 2511 |
|
2512 | 2512 |
while (true) { |
2513 | 2513 |
|
2514 | 2514 |
if ((*_blossom_data)[left].pred == INVALID) break; |
2515 | 2515 |
|
2516 | 2516 |
left = |
2517 | 2517 |
_blossom_set->find(_graph.target((*_blossom_data)[left].pred)); |
2518 | 2518 |
left_path.push_back(left); |
2519 | 2519 |
left = |
2520 | 2520 |
_blossom_set->find(_graph.target((*_blossom_data)[left].pred)); |
2521 | 2521 |
left_path.push_back(left); |
2522 | 2522 |
|
2523 | 2523 |
left_set.insert(left); |
2524 | 2524 |
|
2525 | 2525 |
if (right_set.find(left) != right_set.end()) { |
2526 | 2526 |
nca = left; |
2527 | 2527 |
break; |
2528 | 2528 |
} |
2529 | 2529 |
|
2530 | 2530 |
if ((*_blossom_data)[right].pred == INVALID) break; |
2531 | 2531 |
|
2532 | 2532 |
right = |
2533 | 2533 |
_blossom_set->find(_graph.target((*_blossom_data)[right].pred)); |
2534 | 2534 |
right_path.push_back(right); |
2535 | 2535 |
right = |
2536 | 2536 |
_blossom_set->find(_graph.target((*_blossom_data)[right].pred)); |
2537 | 2537 |
right_path.push_back(right); |
2538 | 2538 |
|
2539 | 2539 |
right_set.insert(right); |
2540 | 2540 |
|
2541 | 2541 |
if (left_set.find(right) != left_set.end()) { |
2542 | 2542 |
nca = right; |
2543 | 2543 |
break; |
2544 | 2544 |
} |
2545 | 2545 |
|
2546 | 2546 |
} |
2547 | 2547 |
|
2548 | 2548 |
if (nca == -1) { |
2549 | 2549 |
if ((*_blossom_data)[left].pred == INVALID) { |
2550 | 2550 |
nca = right; |
2551 | 2551 |
while (left_set.find(nca) == left_set.end()) { |
2552 | 2552 |
nca = |
2553 | 2553 |
_blossom_set->find(_graph.target((*_blossom_data)[nca].pred)); |
2554 | 2554 |
right_path.push_back(nca); |
2555 | 2555 |
nca = |
2556 | 2556 |
_blossom_set->find(_graph.target((*_blossom_data)[nca].pred)); |
2557 | 2557 |
right_path.push_back(nca); |
2558 | 2558 |
} |
2559 | 2559 |
} else { |
2560 | 2560 |
nca = left; |
2561 | 2561 |
while (right_set.find(nca) == right_set.end()) { |
2562 | 2562 |
nca = |
2563 | 2563 |
_blossom_set->find(_graph.target((*_blossom_data)[nca].pred)); |
2564 | 2564 |
left_path.push_back(nca); |
2565 | 2565 |
nca = |
2566 | 2566 |
_blossom_set->find(_graph.target((*_blossom_data)[nca].pred)); |
2567 | 2567 |
left_path.push_back(nca); |
2568 | 2568 |
} |
2569 | 2569 |
} |
2570 | 2570 |
} |
2571 | 2571 |
} |
2572 | 2572 |
|
2573 | 2573 |
std::vector<int> subblossoms; |
2574 | 2574 |
Arc prev; |
2575 | 2575 |
|
2576 | 2576 |
prev = _graph.direct(edge, true); |
2577 | 2577 |
for (int i = 0; left_path[i] != nca; i += 2) { |
2578 | 2578 |
subblossoms.push_back(left_path[i]); |
2579 | 2579 |
(*_blossom_data)[left_path[i]].next = prev; |
2580 | 2580 |
_tree_set->erase(left_path[i]); |
2581 | 2581 |
|
2582 | 2582 |
subblossoms.push_back(left_path[i + 1]); |
2583 | 2583 |
(*_blossom_data)[left_path[i + 1]].status = EVEN; |
2584 | 2584 |
oddToEven(left_path[i + 1], tree); |
2585 | 2585 |
_tree_set->erase(left_path[i + 1]); |
2586 | 2586 |
prev = _graph.oppositeArc((*_blossom_data)[left_path[i + 1]].pred); |
2587 | 2587 |
} |
2588 | 2588 |
|
2589 | 2589 |
int k = 0; |
2590 | 2590 |
while (right_path[k] != nca) ++k; |
2591 | 2591 |
|
2592 | 2592 |
subblossoms.push_back(nca); |
2593 | 2593 |
(*_blossom_data)[nca].next = prev; |
2594 | 2594 |
|
2595 | 2595 |
for (int i = k - 2; i >= 0; i -= 2) { |
2596 | 2596 |
subblossoms.push_back(right_path[i + 1]); |
2597 | 2597 |
(*_blossom_data)[right_path[i + 1]].status = EVEN; |
2598 | 2598 |
oddToEven(right_path[i + 1], tree); |
2599 | 2599 |
_tree_set->erase(right_path[i + 1]); |
2600 | 2600 |
|
2601 | 2601 |
(*_blossom_data)[right_path[i + 1]].next = |
2602 | 2602 |
(*_blossom_data)[right_path[i + 1]].pred; |
2603 | 2603 |
|
2604 | 2604 |
subblossoms.push_back(right_path[i]); |
2605 | 2605 |
_tree_set->erase(right_path[i]); |
2606 | 2606 |
} |
2607 | 2607 |
|
2608 | 2608 |
int surface = |
2609 | 2609 |
_blossom_set->join(subblossoms.begin(), subblossoms.end()); |
2610 | 2610 |
|
2611 | 2611 |
for (int i = 0; i < int(subblossoms.size()); ++i) { |
2612 | 2612 |
if (!_blossom_set->trivial(subblossoms[i])) { |
2613 | 2613 |
(*_blossom_data)[subblossoms[i]].pot += 2 * _delta_sum; |
2614 | 2614 |
} |
2615 | 2615 |
(*_blossom_data)[subblossoms[i]].status = MATCHED; |
2616 | 2616 |
} |
2617 | 2617 |
|
2618 | 2618 |
(*_blossom_data)[surface].pot = -2 * _delta_sum; |
2619 | 2619 |
(*_blossom_data)[surface].offset = 0; |
2620 | 2620 |
(*_blossom_data)[surface].status = EVEN; |
2621 | 2621 |
(*_blossom_data)[surface].pred = (*_blossom_data)[nca].pred; |
2622 | 2622 |
(*_blossom_data)[surface].next = (*_blossom_data)[nca].pred; |
2623 | 2623 |
|
2624 | 2624 |
_tree_set->insert(surface, tree); |
2625 | 2625 |
_tree_set->erase(nca); |
2626 | 2626 |
} |
2627 | 2627 |
|
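// Expand step: split an odd blossom; the sub-blossoms on the
// even-length path between its entry (pred) and exit (next) arcs are
// re-inserted into the tree with alternating ODD/EVEN status, while
// the remaining sub-blossoms stay matched among themselves.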
2628 | 2628 |
void splitBlossom(int blossom) { |
2629 | 2629 |
Arc next = (*_blossom_data)[blossom].next; |
2630 | 2630 |
Arc pred = (*_blossom_data)[blossom].pred; |
2631 | 2631 |
|
2632 | 2632 |
int tree = _tree_set->find(blossom); |
2633 | 2633 |
|
2634 | 2634 |
(*_blossom_data)[blossom].status = MATCHED; |
2635 | 2635 |
oddToMatched(blossom); |
2636 | 2636 |
if (_delta2->state(blossom) == _delta2->IN_HEAP) { |
2637 | 2637 |
_delta2->erase(blossom); |
2638 | 2638 |
} |
2639 | 2639 |
|
2640 | 2640 |
std::vector<int> subblossoms; |
2641 | 2641 |
_blossom_set->split(blossom, std::back_inserter(subblossoms)); |
2642 | 2642 |
|
2643 | 2643 |
Value offset = (*_blossom_data)[blossom].offset; |
2644 | 2644 |
int b = _blossom_set->find(_graph.source(pred)); |
2645 | 2645 |
int d = _blossom_set->find(_graph.source(next)); |
2646 | 2646 |
|
2647 | 2647 |
int ib = -1, id = -1; |
2648 | 2648 |
for (int i = 0; i < int(subblossoms.size()); ++i) { |
2649 | 2649 |
if (subblossoms[i] == b) ib = i; |
2650 | 2650 |
if (subblossoms[i] == d) id = i; |
2651 | 2651 |
|
2652 | 2652 |
(*_blossom_data)[subblossoms[i]].offset = offset; |
2653 | 2653 |
if (!_blossom_set->trivial(subblossoms[i])) { |
2654 | 2654 |
(*_blossom_data)[subblossoms[i]].pot -= 2 * offset; |
2655 | 2655 |
} |
2656 | 2656 |
if (_blossom_set->classPrio(subblossoms[i]) != |
2657 | 2657 |
std::numeric_limits<Value>::max()) { |
2658 | 2658 |
_delta2->push(subblossoms[i], |
2659 | 2659 |
_blossom_set->classPrio(subblossoms[i]) - |
2660 | 2660 |
(*_blossom_data)[subblossoms[i]].offset); |
2661 | 2661 |
} |
2662 | 2662 |
} |
2663 | 2663 |
|
2664 | 2664 |
if (id > ib ? ((id - ib) % 2 == 0) : ((ib - id) % 2 == 1)) { |
2665 | 2665 |
for (int i = (id + 1) % subblossoms.size(); |
2666 | 2666 |
i != ib; i = (i + 2) % subblossoms.size()) { |
2667 | 2667 |
int sb = subblossoms[i]; |
2668 | 2668 |
int tb = subblossoms[(i + 1) % subblossoms.size()]; |
2669 | 2669 |
(*_blossom_data)[sb].next = |
2670 | 2670 |
_graph.oppositeArc((*_blossom_data)[tb].next); |
2671 | 2671 |
} |
2672 | 2672 |
|
2673 | 2673 |
for (int i = ib; i != id; i = (i + 2) % subblossoms.size()) { |
2674 | 2674 |
int sb = subblossoms[i]; |
2675 | 2675 |
int tb = subblossoms[(i + 1) % subblossoms.size()]; |
2676 | 2676 |
int ub = subblossoms[(i + 2) % subblossoms.size()]; |
2677 | 2677 |
|
2678 | 2678 |
(*_blossom_data)[sb].status = ODD; |
2679 | 2679 |
matchedToOdd(sb); |
2680 | 2680 |
_tree_set->insert(sb, tree); |
2681 | 2681 |
(*_blossom_data)[sb].pred = pred; |
2682 | 2682 |
(*_blossom_data)[sb].next = |
2683 | 2683 |
_graph.oppositeArc((*_blossom_data)[tb].next); |
2684 | 2684 |
|
2685 | 2685 |
pred = (*_blossom_data)[ub].next; |
2686 | 2686 |
|
2687 | 2687 |
(*_blossom_data)[tb].status = EVEN; |
2688 | 2688 |
matchedToEven(tb, tree); |
2689 | 2689 |
_tree_set->insert(tb, tree); |
2690 | 2690 |
(*_blossom_data)[tb].pred = (*_blossom_data)[tb].next; |
2691 | 2691 |
} |
2692 | 2692 |
|
2693 | 2693 |
(*_blossom_data)[subblossoms[id]].status = ODD; |
2694 | 2694 |
matchedToOdd(subblossoms[id]); |
2695 | 2695 |
_tree_set->insert(subblossoms[id], tree); |
2696 | 2696 |
(*_blossom_data)[subblossoms[id]].next = next; |
2697 | 2697 |
(*_blossom_data)[subblossoms[id]].pred = pred; |
2698 | 2698 |
|
2699 | 2699 |
} else { |
2700 | 2700 |
|
2701 | 2701 |
for (int i = (ib + 1) % subblossoms.size(); |
2702 | 2702 |
i != id; i = (i + 2) % subblossoms.size()) { |
2703 | 2703 |
int sb = subblossoms[i]; |
2704 | 2704 |
int tb = subblossoms[(i + 1) % subblossoms.size()]; |
2705 | 2705 |
(*_blossom_data)[sb].next = |
2706 | 2706 |
_graph.oppositeArc((*_blossom_data)[tb].next); |
2707 | 2707 |
} |
2708 | 2708 |
|
2709 | 2709 |
for (int i = id; i != ib; i = (i + 2) % subblossoms.size()) { |
2710 | 2710 |
int sb = subblossoms[i]; |
2711 | 2711 |
int tb = subblossoms[(i + 1) % subblossoms.size()]; |
2712 | 2712 |
int ub = subblossoms[(i + 2) % subblossoms.size()]; |
2713 | 2713 |
|
2714 | 2714 |
(*_blossom_data)[sb].status = ODD; |
2715 | 2715 |
matchedToOdd(sb); |
2716 | 2716 |
_tree_set->insert(sb, tree); |
2717 | 2717 |
(*_blossom_data)[sb].next = next; |
2718 | 2718 |
(*_blossom_data)[sb].pred = |
2719 | 2719 |
_graph.oppositeArc((*_blossom_data)[tb].next); |
2720 | 2720 |
|
2721 | 2721 |
(*_blossom_data)[tb].status = EVEN; |
2722 | 2722 |
matchedToEven(tb, tree); |
2723 | 2723 |
_tree_set->insert(tb, tree); |
2724 | 2724 |
(*_blossom_data)[tb].pred = |
2725 | 2725 |
(*_blossom_data)[tb].next = |
2726 | 2726 |
_graph.oppositeArc((*_blossom_data)[ub].next); |
2727 | 2727 |
next = (*_blossom_data)[ub].next; |
2728 | 2728 |
} |
2729 | 2729 |
|
2730 | 2730 |
(*_blossom_data)[subblossoms[ib]].status = ODD; |
2731 | 2731 |
matchedToOdd(subblossoms[ib]); |
2732 | 2732 |
_tree_set->insert(subblossoms[ib], tree); |
2733 | 2733 |
(*_blossom_data)[subblossoms[ib]].next = next; |
2734 | 2734 |
(*_blossom_data)[subblossoms[ib]].pred = pred; |
2735 | 2735 |
} |
2736 | 2736 |
_tree_set->erase(blossom); |
2737 | 2737 |
} |
2738 | 2738 |
|
2739 | 2739 |
void extractBlossom(int blossom, const Node& base, const Arc& matching) { |
2740 | 2740 |
if (_blossom_set->trivial(blossom)) { |
2741 | 2741 |
int bi = (*_node_index)[base]; |
2742 | 2742 |
Value pot = (*_node_data)[bi].pot; |
2743 | 2743 |
|
2744 |
_matching->set(base, matching); |
|
2744 |
(*_matching)[base] = matching; |
|
2745 | 2745 |
_blossom_node_list.push_back(base); |
2746 |
_node_potential->set(base, pot); |
|
2746 |
(*_node_potential)[base] = pot; |
|
2747 | 2747 |
} else { |
2748 | 2748 |
|
2749 | 2749 |
Value pot = (*_blossom_data)[blossom].pot; |
2750 | 2750 |
int bn = _blossom_node_list.size(); |
2751 | 2751 |
|
2752 | 2752 |
std::vector<int> subblossoms; |
2753 | 2753 |
_blossom_set->split(blossom, std::back_inserter(subblossoms)); |
2754 | 2754 |
int b = _blossom_set->find(base); |
2755 | 2755 |
int ib = -1; |
2756 | 2756 |
for (int i = 0; i < int(subblossoms.size()); ++i) { |
2757 | 2757 |
if (subblossoms[i] == b) { ib = i; break; } |
2758 | 2758 |
} |
2759 | 2759 |
|
2760 | 2760 |
for (int i = 1; i < int(subblossoms.size()); i += 2) { |
2761 | 2761 |
int sb = subblossoms[(ib + i) % subblossoms.size()]; |
2762 | 2762 |
int tb = subblossoms[(ib + i + 1) % subblossoms.size()]; |
2763 | 2763 |
|
2764 | 2764 |
Arc m = (*_blossom_data)[tb].next; |
2765 | 2765 |
extractBlossom(sb, _graph.target(m), _graph.oppositeArc(m)); |
2766 | 2766 |
extractBlossom(tb, _graph.source(m), m); |
2767 | 2767 |
} |
2768 | 2768 |
extractBlossom(subblossoms[ib], base, matching); |
2769 | 2769 |
|
2770 | 2770 |
int en = _blossom_node_list.size(); |
2771 | 2771 |
|
2772 | 2772 |
_blossom_potential.push_back(BlossomVariable(bn, en, pot)); |
2773 | 2773 |
} |
2774 | 2774 |
} |
2775 | 2775 |
|
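// Recover the primal and dual solution from the final structures:
// fold the accumulated offsets into the potentials, then unfold every
// surface blossom recursively down to single nodes.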
2776 | 2776 |
void extractMatching() { |
2777 | 2777 |
std::vector<int> blossoms; |
2778 | 2778 |
for (typename BlossomSet::ClassIt c(*_blossom_set); c != INVALID; ++c) { |
2779 | 2779 |
blossoms.push_back(c); |
2780 | 2780 |
} |
2781 | 2781 |
|
2782 | 2782 |
for (int i = 0; i < int(blossoms.size()); ++i) { |
2783 | 2783 |
|
2784 | 2784 |
Value offset = (*_blossom_data)[blossoms[i]].offset; |
2785 | 2785 |
(*_blossom_data)[blossoms[i]].pot += 2 * offset; |
2786 | 2786 |
for (typename BlossomSet::ItemIt n(*_blossom_set, blossoms[i]); |
2787 | 2787 |
n != INVALID; ++n) { |
2788 | 2788 |
(*_node_data)[(*_node_index)[n]].pot -= offset; |
2789 | 2789 |
} |
2790 | 2790 |
|
2791 | 2791 |
Arc matching = (*_blossom_data)[blossoms[i]].next; |
2792 | 2792 |
Node base = _graph.source(matching); |
2793 | 2793 |
extractBlossom(blossoms[i], base, matching); |
2794 | 2794 |
} |
2795 | 2795 |
} |
2796 | 2796 |
|
2797 | 2797 |
public: |
2798 | 2798 |
|
2799 | 2799 |
/// \brief Constructor |
2800 | 2800 |
/// |
2801 | 2801 |
/// Constructor. |
2802 | 2802 |
MaxWeightedPerfectMatching(const Graph& graph, const WeightMap& weight) |
2803 | 2803 |
: _graph(graph), _weight(weight), _matching(0), |
2804 | 2804 |
_node_potential(0), _blossom_potential(), _blossom_node_list(), |
2805 | 2805 |
_node_num(0), _blossom_num(0), |
2806 | 2806 |
|
2807 | 2807 |
_blossom_index(0), _blossom_set(0), _blossom_data(0), |
2808 | 2808 |
_node_index(0), _node_heap_index(0), _node_data(0), |
2809 | 2809 |
_tree_set_index(0), _tree_set(0), |
2810 | 2810 |
|
2811 | 2811 |
_delta2_index(0), _delta2(0), |
2812 | 2812 |
_delta3_index(0), _delta3(0), |
2813 | 2813 |
_delta4_index(0), _delta4(0), |
2814 | 2814 |
|
2815 | 2815 |
_delta_sum() {} |
2816 | 2816 |
|
2817 | 2817 |
~MaxWeightedPerfectMatching() { |
2818 | 2818 |
destroyStructures(); |
2819 | 2819 |
} |
2820 | 2820 |
|
2821 | 2821 |
/// \name Execution control |
2822 | 2822 |
/// The simplest way to execute the algorithm is to use the |
2823 | 2823 |
/// \c run() member function. |
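    /// A minimal usage sketch (the graph type and the \c g and
    /// \c weight objects below are illustrative placeholders, not part
    /// of this interface):
    /// \code
    ///   // ListGraph g;                        -- assumed to exist
    ///   // ListGraph::EdgeMap<int> weight(g);  -- assumed to exist
    ///   MaxWeightedPerfectMatching<ListGraph, ListGraph::EdgeMap<int> >
    ///     mwpm(g, weight);
    ///   if (mwpm.run()) {
    ///     // a maximum weight perfect matching has been found
    ///   }
    /// \endcode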
2824 | 2824 |
|
2825 | 2825 |
///@{ |
2826 | 2826 |
|
2827 | 2827 |
/// \brief Initialize the algorithm |
2828 | 2828 |
/// |
2829 | 2829 |
/// Initializes the internal data structures of the algorithm. |
2830 | 2830 |
void init() { |
2831 | 2831 |
createStructures(); |
2832 | 2832 |
|
2833 | 2833 |
for (ArcIt e(_graph); e != INVALID; ++e) { |
2834 |
_node_heap_index->set(e, BinHeap<Value, IntArcMap>::PRE_HEAP); |
|
2834 |
(*_node_heap_index)[e] = BinHeap<Value, IntArcMap>::PRE_HEAP; |
|
2835 | 2835 |
} |
2836 | 2836 |
for (EdgeIt e(_graph); e != INVALID; ++e) { |
2837 |
_delta3_index->set(e, _delta3->PRE_HEAP); |
|
2837 |
(*_delta3_index)[e] = _delta3->PRE_HEAP; |
|
2838 | 2838 |
} |
2839 | 2839 |
for (int i = 0; i < _blossom_num; ++i) { |
2840 |
_delta2_index->set(i, _delta2->PRE_HEAP); |
|
2841 |
_delta4_index->set(i, _delta4->PRE_HEAP); |
|
2840 |
(*_delta2_index)[i] = _delta2->PRE_HEAP; |
|
2841 |
(*_delta4_index)[i] = _delta4->PRE_HEAP; |
|
2842 | 2842 |
} |
2843 | 2843 |
|
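// Initial duals: each node potential is set to half of the largest
// scaled weight of its incident edges, which keeps every reduced cost
// pushed into the delta3 heap non-negative.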
2844 | 2844 |
int index = 0; |
2845 | 2845 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
2846 | 2846 |
Value max = - std::numeric_limits<Value>::max(); |
2847 | 2847 |
for (OutArcIt e(_graph, n); e != INVALID; ++e) { |
2848 | 2848 |
if (_graph.target(e) == n) continue; |
2849 | 2849 |
if ((dualScale * _weight[e]) / 2 > max) { |
2850 | 2850 |
max = (dualScale * _weight[e]) / 2; |
2851 | 2851 |
} |
2852 | 2852 |
} |
2853 |
_node_index->set(n, index); |
|
2853 |
(*_node_index)[n] = index; |
|
2854 | 2854 |
(*_node_data)[index].pot = max; |
2855 | 2855 |
int blossom = |
2856 | 2856 |
_blossom_set->insert(n, std::numeric_limits<Value>::max()); |
2857 | 2857 |
|
2858 | 2858 |
_tree_set->insert(blossom); |
2859 | 2859 |
|
2860 | 2860 |
(*_blossom_data)[blossom].status = EVEN; |
2861 | 2861 |
(*_blossom_data)[blossom].pred = INVALID; |
2862 | 2862 |
(*_blossom_data)[blossom].next = INVALID; |
2863 | 2863 |
(*_blossom_data)[blossom].pot = 0; |
2864 | 2864 |
(*_blossom_data)[blossom].offset = 0; |
2865 | 2865 |
++index; |
2866 | 2866 |
} |
2867 | 2867 |
for (EdgeIt e(_graph); e != INVALID; ++e) { |
2868 | 2868 |
int si = (*_node_index)[_graph.u(e)]; |
2869 | 2869 |
int ti = (*_node_index)[_graph.v(e)]; |
2870 | 2870 |
if (_graph.u(e) != _graph.v(e)) { |
2871 | 2871 |
_delta3->push(e, ((*_node_data)[si].pot + (*_node_data)[ti].pot - |
2872 | 2872 |
dualScale * _weight[e]) / 2); |
2873 | 2873 |
} |
2874 | 2874 |
} |
2875 | 2875 |
} |
2876 | 2876 |
|
2877 | 2877 |
/// \brief Starts the algorithm |
2878 | 2878 |
/// |
2879 | 2879 |
/// Starts the algorithm. |
2880 | 2880 |
bool start() { |
2881 | 2881 |
enum OpType { |
2882 | 2882 |
D2, D3, D4 |
2883 | 2883 |
}; |
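// Main loop: apply the dual adjustment chosen by the smallest delta
// value. D2 extends an alternating tree along an arc, D3 either
// shrinks a blossom or augments along an edge, and D4 splits
// (expands) an odd blossom.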
2884 | 2884 |
|
2885 | 2885 |
int unmatched = _node_num; |
2886 | 2886 |
while (unmatched > 0) { |
2887 | 2887 |
Value d2 = !_delta2->empty() ? |
2888 | 2888 |
_delta2->prio() : std::numeric_limits<Value>::max(); |
2889 | 2889 |
|
2890 | 2890 |
Value d3 = !_delta3->empty() ? |
2891 | 2891 |
_delta3->prio() : std::numeric_limits<Value>::max(); |
2892 | 2892 |
|
2893 | 2893 |
Value d4 = !_delta4->empty() ? |
2894 | 2894 |
_delta4->prio() : std::numeric_limits<Value>::max(); |
2895 | 2895 |
|
2896 | 2896 |
_delta_sum = d2; OpType ot = D2; |
2897 | 2897 |
if (d3 < _delta_sum) { _delta_sum = d3; ot = D3; } |
2898 | 2898 |
if (d4 < _delta_sum) { _delta_sum = d4; ot = D4; } |
2899 | 2899 |
|
2900 | 2900 |
if (_delta_sum == std::numeric_limits<Value>::max()) { |
2901 | 2901 |
return false; |
2902 | 2902 |
} |
2903 | 2903 |
|
2904 | 2904 |
switch (ot) { |
2905 | 2905 |
case D2: |
2906 | 2906 |
{ |
2907 | 2907 |
int blossom = _delta2->top(); |
2908 | 2908 |
Node n = _blossom_set->classTop(blossom); |
2909 | 2909 |
Arc e = (*_node_data)[(*_node_index)[n]].heap.top(); |
2910 | 2910 |
extendOnArc(e); |
2911 | 2911 |
} |
2912 | 2912 |
break; |
2913 | 2913 |
case D3: |
2914 | 2914 |
{ |
2915 | 2915 |
Edge e = _delta3->top(); |
2916 | 2916 |
|
2917 | 2917 |
int left_blossom = _blossom_set->find(_graph.u(e)); |
2918 | 2918 |
int right_blossom = _blossom_set->find(_graph.v(e)); |
2919 | 2919 |
|
2920 | 2920 |
if (left_blossom == right_blossom) { |
2921 | 2921 |
_delta3->pop(); |
2922 | 2922 |
} else { |
2923 | 2923 |
int left_tree = _tree_set->find(left_blossom); |
2924 | 2924 |
int right_tree = _tree_set->find(right_blossom); |
2925 | 2925 |
|
2926 | 2926 |
if (left_tree == right_tree) { |
2927 | 2927 |
shrinkOnEdge(e, left_tree); |
2928 | 2928 |
} else { |
2929 | 2929 |
augmentOnEdge(e); |
2930 | 2930 |
unmatched -= 2; |
2931 | 2931 |
} |
2932 | 2932 |
} |
2933 | 2933 |
} break; |
2934 | 2934 |
case D4: |
2935 | 2935 |
splitBlossom(_delta4->top()); |
2936 | 2936 |
break; |
2937 | 2937 |
} |
2938 | 2938 |
} |
2939 | 2939 |
extractMatching(); |
2940 | 2940 |
return true; |
2941 | 2941 |
} |
2942 | 2942 |
|
2943 | 2943 |
/// \brief Runs the %MaxWeightedPerfectMatching algorithm. |
2944 | 2944 |
/// |
2945 | 2945 |
/// This method runs the %MaxWeightedPerfectMatching algorithm. |
2946 | 2946 |
/// |
2947 | 2947 |
/// \note mwm.run() is just a shortcut for the following code. |
2948 | 2948 |
/// \code |
2949 | 2949 |
/// mwm.init(); |
2950 | 2950 |
/// mwm.start(); |
2951 | 2951 |
/// \endcode |
2952 | 2952 |
bool run() { |
2953 | 2953 |
init(); |
2954 | 2954 |
return start(); |
2955 | 2955 |
} |
2956 | 2956 |
|
2957 | 2957 |
/// @} |
2958 | 2958 |
|
2959 | 2959 |
/// \name Primal solution |
2960 | 2960 |
/// Functions to get the primal solution, i.e. the matching. |
2961 | 2961 |
|
2962 | 2962 |
/// @{ |
2963 | 2963 |
|
2964 | 2964 |
/// \brief Returns the matching value. |
2965 | 2965 |
/// |
2966 | 2966 |
/// Returns the matching value. |
2967 | 2967 |
Value matchingValue() const { |
2968 | 2968 |
Value sum = 0; |
2969 | 2969 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
2970 | 2970 |
if ((*_matching)[n] != INVALID) { |
2971 | 2971 |
sum += _weight[(*_matching)[n]]; |
2972 | 2972 |
} |
2973 | 2973 |
} |
2974 | 2974 |
return sum / 2; |
2975 | 2975 |
} |
2976 | 2976 |
|
2977 | 2977 |
/// \brief Returns true when the edge is in the matching. |
2978 | 2978 |
/// |
2979 | 2979 |
/// Returns true when the edge is in the matching. |
2980 | 2980 |
bool matching(const Edge& edge) const { |
2981 | 2981 |
return static_cast<const Edge&>((*_matching)[_graph.u(edge)]) == edge; |
2982 | 2982 |
} |
2983 | 2983 |
|
2984 | 2984 |
/// \brief Returns the incident matching arc. |
2985 | 2985 |
/// |
2986 | 2986 |
/// Returns the incident matching arc of the given node. |
2987 | 2987 |
Arc matching(const Node& node) const { |
2988 | 2988 |
return (*_matching)[node]; |
2989 | 2989 |
} |
2990 | 2990 |
|
2991 | 2991 |
/// \brief Returns the mate of the node. |
2992 | 2992 |
/// |
2993 | 2993 |
/// Returns the adjacent node in the matching arc. |
2994 | 2994 |
Node mate(const Node& node) const { |
2995 | 2995 |
return _graph.target((*_matching)[node]); |
2996 | 2996 |
} |
2997 | 2997 |
|
2998 | 2998 |
/// @} |
2999 | 2999 |
|
3000 | 3000 |
/// \name Dual solution |
3001 | 3001 |
/// Functions to get the dual solution. |
3002 | 3002 |
|
3003 | 3003 |
/// @{ |
3004 | 3004 |
|
3005 | 3005 |
/// \brief Returns the value of the dual solution. |
3006 | 3006 |
/// |
3007 | 3007 |
/// Returns the value of the dual solution. It should be equal to |
3008 | 3008 |
/// the primal value scaled by \ref dualScale "dual scale". |
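    /// An illustrative consistency check, assuming \c run() returned
    /// \c true and \c mwpm denotes the algorithm object:
    /// \code
    ///   // mwpm.dualValue() == dualScale * mwpm.matchingValue()
    /// \endcode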
3009 | 3009 |
Value dualValue() const { |
3010 | 3010 |
Value sum = 0; |
3011 | 3011 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
3012 | 3012 |
sum += nodeValue(n); |
3013 | 3013 |
} |
3014 | 3014 |
for (int i = 0; i < blossomNum(); ++i) { |
3015 | 3015 |
sum += blossomValue(i) * (blossomSize(i) / 2); |
3016 | 3016 |
} |
3017 | 3017 |
return sum; |
3018 | 3018 |
} |
3019 | 3019 |
|
3020 | 3020 |
/// \brief Returns the value of the node. |
3021 | 3021 |
/// |
3022 | 3022 |
/// Returns the value of the node. |
3023 | 3023 |
Value nodeValue(const Node& n) const { |
3024 | 3024 |
return (*_node_potential)[n]; |
3025 | 3025 |
} |
3026 | 3026 |
|
3027 | 3027 |
/// \brief Returns the number of the blossoms in the basis. |
3028 | 3028 |
/// |
3029 | 3029 |
/// Returns the number of the blossoms in the basis. |
3030 | 3030 |
/// \see BlossomIt |
3031 | 3031 |
int blossomNum() const { |
3032 | 3032 |
return _blossom_potential.size(); |
3033 | 3033 |
} |
3034 | 3034 |
|
3035 | 3035 |
|
3036 | 3036 |
/// \brief Returns the number of the nodes in the blossom. |
3037 | 3037 |
/// |
3038 | 3038 |
/// Returns the number of the nodes in the blossom. |
3039 | 3039 |
int blossomSize(int k) const { |
3040 | 3040 |
return _blossom_potential[k].end - _blossom_potential[k].begin; |
3041 | 3041 |
} |
3042 | 3042 |
|
3043 | 3043 |
/// \brief Returns the value of the blossom. |
3044 | 3044 |
/// |
3045 | 3045 |
/// Returns the value of the blossom. |
3046 | 3046 |
/// \see BlossomIt |
3047 | 3047 |
Value blossomValue(int k) const { |
3048 | 3048 |
return _blossom_potential[k].value; |
3049 | 3049 |
} |
3050 | 3050 |
|
3051 | 3051 |
/// \brief Iterator for obtaining the nodes of the blossom. |
3052 | 3052 |
/// |
3053 | 3053 |
/// Iterator for obtaining the nodes of the blossom. This class |
3054 | 3054 |
/// provides a common LEMON-style iterator for listing a |
3055 | 3055 |
/// subset of the nodes. |
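    ///
    /// A usage sketch (here \c mwpm and \c k are placeholders for the
    /// algorithm object and a blossom index below \ref blossomNum()):
    /// \code
    ///   for (BlossomIt n(mwpm, k); n != INVALID; ++n) {
    ///     // the node n belongs to the k-th blossom
    ///   }
    /// \endcode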
3056 | 3056 |
class BlossomIt { |
3057 | 3057 |
public: |
3058 | 3058 |
|
3059 | 3059 |
/// \brief Constructor. |
3060 | 3060 |
/// |
3061 | 3061 |
/// Constructor to get the nodes of the variable. |
3062 | 3062 |
BlossomIt(const MaxWeightedPerfectMatching& algorithm, int variable) |
3063 | 3063 |
: _algorithm(&algorithm) |
3064 | 3064 |
{ |
3065 | 3065 |
_index = _algorithm->_blossom_potential[variable].begin; |
3066 | 3066 |
_last = _algorithm->_blossom_potential[variable].end; |
3067 | 3067 |
} |
3068 | 3068 |
|
3069 | 3069 |
/// \brief Conversion to node. |
3070 | 3070 |
/// |
3071 | 3071 |
/// Conversion to node. |
3072 | 3072 |
operator Node() const { |
3073 | 3073 |
return _algorithm->_blossom_node_list[_index]; |
3074 | 3074 |
} |
3075 | 3075 |
|
3076 | 3076 |
/// \brief Increment operator. |
3077 | 3077 |
/// |
3078 | 3078 |
/// Increment operator. |
3079 | 3079 |
BlossomIt& operator++() { |
3080 | 3080 |
++_index; |
3081 | 3081 |
return *this; |
3082 | 3082 |
} |
3083 | 3083 |
|
3084 | 3084 |
/// \brief Validity checking |
3085 | 3085 |
/// |
3086 | 3086 |
/// Checks whether the iterator is invalid. |
3087 | 3087 |
bool operator==(Invalid) const { return _index == _last; } |
3088 | 3088 |
|
3089 | 3089 |
/// \brief Validity checking |
3090 | 3090 |
/// |
3091 | 3091 |
/// Checks whether the iterator is valid. |
3092 | 3092 |
bool operator!=(Invalid) const { return _index != _last; } |
3093 | 3093 |
|
3094 | 3094 |
private: |
3095 | 3095 |
const MaxWeightedPerfectMatching* _algorithm; |
3096 | 3096 |
int _last; |
3097 | 3097 |
int _index; |
3098 | 3098 |
}; |
3099 | 3099 |
|
3100 | 3100 |
/// @} |
3101 | 3101 |
|
3102 | 3102 |
}; |
3103 | 3103 |
|
3104 | 3104 |
|
3105 | 3105 |
} //END OF NAMESPACE LEMON |
3106 | 3106 |
|
3107 | 3107 |
#endif //LEMON_MAX_MATCHING_H |