@@ -360,319 +360,319 @@
360 | 360 |
} |
361 | 361 |
|
362 | 362 |
/// Sets the upper bound capacity map. |
363 | 363 |
|
364 | 364 |
/// Sets the upper bound capacity map. |
365 | 365 |
/// \return <tt>(*this)</tt> |
366 | 366 |
Circulation& upperCapMap(const LCapMap& map) { |
367 | 367 |
_up = &map; |
368 | 368 |
return *this; |
369 | 369 |
} |
370 | 370 |
|
371 | 371 |
/// Sets the lower bound map for the supply of the nodes. |
372 | 372 |
|
373 | 373 |
/// Sets the lower bound map for the supply of the nodes. |
374 | 374 |
/// \return <tt>(*this)</tt> |
375 | 375 |
Circulation& deltaMap(const DeltaMap& map) { |
376 | 376 |
_delta = &map; |
377 | 377 |
return *this; |
378 | 378 |
} |
379 | 379 |
|
380 | 380 |
/// \brief Sets the flow map. |
381 | 381 |
/// |
382 | 382 |
/// Sets the flow map. |
383 | 383 |
/// If you don't use this function before calling \ref run() or |
384 | 384 |
/// \ref init(), an instance will be allocated automatically. |
385 | 385 |
/// The destructor deallocates this automatically allocated map, |
386 | 386 |
/// of course. |
387 | 387 |
/// \return <tt>(*this)</tt> |
388 | 388 |
Circulation& flowMap(FlowMap& map) { |
389 | 389 |
if (_local_flow) { |
390 | 390 |
delete _flow; |
391 | 391 |
_local_flow = false; |
392 | 392 |
} |
393 | 393 |
_flow = &map; |
394 | 394 |
return *this; |
395 | 395 |
} |
396 | 396 |
|
397 | 397 |
/// \brief Sets the elevator used by algorithm. |
398 | 398 |
/// |
399 | 399 |
/// Sets the elevator used by algorithm. |
400 | 400 |
/// If you don't use this function before calling \ref run() or |
401 | 401 |
/// \ref init(), an instance will be allocated automatically. |
402 | 402 |
/// The destructor deallocates this automatically allocated elevator, |
403 | 403 |
/// of course. |
404 | 404 |
/// \return <tt>(*this)</tt> |
405 | 405 |
Circulation& elevator(Elevator& elevator) { |
406 | 406 |
if (_local_level) { |
407 | 407 |
delete _level; |
408 | 408 |
_local_level = false; |
409 | 409 |
} |
410 | 410 |
_level = &elevator; |
411 | 411 |
return *this; |
412 | 412 |
} |
413 | 413 |
|
414 | 414 |
/// \brief Returns a const reference to the elevator. |
415 | 415 |
/// |
416 | 416 |
/// Returns a const reference to the elevator. |
417 | 417 |
/// |
418 | 418 |
/// \pre Either \ref run() or \ref init() must be called before |
419 | 419 |
/// using this function. |
420 | 420 |
const Elevator& elevator() const { |
421 | 421 |
return *_level; |
422 | 422 |
} |
423 | 423 |
|
424 | 424 |
/// \brief Sets the tolerance used by algorithm. |
425 | 425 |
/// |
426 | 426 |
/// Sets the tolerance used by algorithm. |
427 | 427 |
Circulation& tolerance(const Tolerance& tolerance) { |
428 | 428 |
_tol = tolerance; |
429 | 429 |
return *this; |
430 | 430 |
} |
431 | 431 |
|
432 | 432 |
/// \brief Returns a const reference to the tolerance. |
433 | 433 |
/// |
434 | 434 |
/// Returns a const reference to the tolerance. |
435 | 435 |
const Tolerance& tolerance() const { |
436 | 436 |
return _tol; |
437 | 437 |
} |
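For floating-point flow values, the tolerance object controls the epsilon used by the comparisons inside the algorithm. A minimal sketch, assuming a Circulation instance \c circ built over double-valued maps and the epsilon constructor and \c epsilon() accessor of \c Tolerance<double> from lemon/tolerance.h:
\code
// Hedged sketch: tighten the comparison epsilon for double-valued capacities.
// circ is assumed to be a Circulation instance using double-valued maps.
circ.tolerance(Tolerance<double>(1e-10));
// ... later, the epsilon currently in use can be read back:
double eps = circ.tolerance().epsilon();
\endcode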
438 | 438 |
|
439 | 439 |
/// \name Execution Control |
440 | 440 |
/// The simplest way to execute the algorithm is to call \ref run().\n |
441 | 441 |
/// If you need more control over the initial solution or the execution, |
442 | 442 |
/// first you have to call one of the \ref init() functions, then |
443 | 443 |
/// the \ref start() function. |
444 | 444 |
|
445 | 445 |
///@{ |
446 | 446 |
|
447 | 447 |
/// Initializes the internal data structures. |
448 | 448 |
|
449 | 449 |
/// Initializes the internal data structures and sets all flow values |
450 | 450 |
/// to the lower bound. |
451 | 451 |
void init() |
452 | 452 |
{ |
453 | 453 |
createStructures(); |
454 | 454 |
|
455 | 455 |
for(NodeIt n(_g);n!=INVALID;++n) { |
456 |
_excess |
|
456 |
(*_excess)[n] = (*_delta)[n]; |
|
457 | 457 |
} |
458 | 458 |
|
459 | 459 |
for (ArcIt e(_g);e!=INVALID;++e) { |
460 | 460 |
_flow->set(e, (*_lo)[e]); |
461 |
_excess->set(_g.target(e), (*_excess)[_g.target(e)] + (*_flow)[e]); |
|
462 |
_excess->set(_g.source(e), (*_excess)[_g.source(e)] - (*_flow)[e]); |
|
461 |
(*_excess)[_g.target(e)] += (*_flow)[e]; |
|
462 |
(*_excess)[_g.source(e)] -= (*_flow)[e]; |
|
463 | 463 |
} |
464 | 464 |
|
465 | 465 |
// global relabeling tested, but in the general case it provides |
466 | 466 |
// worse performance for random digraphs |
467 | 467 |
_level->initStart(); |
468 | 468 |
for(NodeIt n(_g);n!=INVALID;++n) |
469 | 469 |
_level->initAddItem(n); |
470 | 470 |
_level->initFinish(); |
471 | 471 |
for(NodeIt n(_g);n!=INVALID;++n) |
472 | 472 |
if(_tol.positive((*_excess)[n])) |
473 | 473 |
_level->activate(n); |
474 | 474 |
} |
475 | 475 |
|
476 | 476 |
/// Initializes the internal data structures using a greedy approach. |
477 | 477 |
|
478 | 478 |
/// Initializes the internal data structures using a greedy approach |
479 | 479 |
/// to construct the initial solution. |
480 | 480 |
void greedyInit() |
481 | 481 |
{ |
482 | 482 |
createStructures(); |
483 | 483 |
|
484 | 484 |
for(NodeIt n(_g);n!=INVALID;++n) { |
485 |
_excess |
|
485 |
(*_excess)[n] = (*_delta)[n]; |
|
486 | 486 |
} |
487 | 487 |
|
488 | 488 |
for (ArcIt e(_g);e!=INVALID;++e) { |
489 | 489 |
if (!_tol.positive((*_excess)[_g.target(e)] + (*_up)[e])) { |
490 | 490 |
_flow->set(e, (*_up)[e]); |
491 |
_excess->set(_g.target(e), (*_excess)[_g.target(e)] + (*_up)[e]); |
|
492 |
_excess->set(_g.source(e), (*_excess)[_g.source(e)] - (*_up)[e]); |
|
491 |
(*_excess)[_g.target(e)] += (*_up)[e]; |
|
492 |
(*_excess)[_g.source(e)] -= (*_up)[e]; |
|
493 | 493 |
} else if (_tol.positive((*_excess)[_g.target(e)] + (*_lo)[e])) { |
494 | 494 |
_flow->set(e, (*_lo)[e]); |
495 |
_excess->set(_g.target(e), (*_excess)[_g.target(e)] + (*_lo)[e]); |
|
496 |
_excess->set(_g.source(e), (*_excess)[_g.source(e)] - (*_lo)[e]); |
|
495 |
(*_excess)[_g.target(e)] += (*_lo)[e]; |
|
496 |
(*_excess)[_g.source(e)] -= (*_lo)[e]; |
|
497 | 497 |
} else { |
498 | 498 |
Value fc = -(*_excess)[_g.target(e)]; |
499 | 499 |
_flow->set(e, fc); |
500 |
_excess->set(_g.target(e), 0); |
|
501 |
_excess->set(_g.source(e), (*_excess)[_g.source(e)] - fc); |
|
500 |
(*_excess)[_g.target(e)] = 0; |
|
501 |
(*_excess)[_g.source(e)] -= fc; |
|
502 | 502 |
} |
503 | 503 |
} |
504 | 504 |
|
505 | 505 |
_level->initStart(); |
506 | 506 |
for(NodeIt n(_g);n!=INVALID;++n) |
507 | 507 |
_level->initAddItem(n); |
508 | 508 |
_level->initFinish(); |
509 | 509 |
for(NodeIt n(_g);n!=INVALID;++n) |
510 | 510 |
if(_tol.positive((*_excess)[n])) |
511 | 511 |
_level->activate(n); |
512 | 512 |
} |
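A minimal sketch of the lower-level interface described in the Execution Control notes above: one of the \c init() functions followed by \c start(). The digraph \c g and the maps \c lo, \c up and \c delta are assumed to be prepared by the caller, and the constructor is assumed to take them in this order, as suggested by the setter functions.
\code
// Hedged sketch: manual initialization and execution
// (ListDigraph g, arc maps lo, up and node map delta prepared elsewhere).
Circulation<ListDigraph> circ(g, lo, up, delta);
circ.init();                  // start from the lower bounds
// circ.greedyInit();         // or use the greedy initial solution instead
if (circ.start()) {
  // feasible circulation found: sum up the flow as a trivial example
  int total = 0;
  for (ListDigraph::ArcIt a(g); a != INVALID; ++a)
    total += circ.flow(a);
}
\endcode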
513 | 513 |
|
514 | 514 |
///Executes the algorithm |
515 | 515 |
|
516 | 516 |
///This function executes the algorithm. |
517 | 517 |
/// |
518 | 518 |
///\return \c true if a feasible circulation is found. |
519 | 519 |
/// |
520 | 520 |
///\sa barrier() |
521 | 521 |
///\sa barrierMap() |
522 | 522 |
bool start() |
523 | 523 |
{ |
524 | 524 |
|
525 | 525 |
Node act; |
526 | 526 |
Node bact=INVALID; |
527 | 527 |
Node last_activated=INVALID; |
528 | 528 |
while((act=_level->highestActive())!=INVALID) { |
529 | 529 |
int actlevel=(*_level)[act]; |
530 | 530 |
int mlevel=_node_num; |
531 | 531 |
Value exc=(*_excess)[act]; |
532 | 532 |
|
533 | 533 |
for(OutArcIt e(_g,act);e!=INVALID; ++e) { |
534 | 534 |
Node v = _g.target(e); |
535 | 535 |
Value fc=(*_up)[e]-(*_flow)[e]; |
536 | 536 |
if(!_tol.positive(fc)) continue; |
537 | 537 |
if((*_level)[v]<actlevel) { |
538 | 538 |
if(!_tol.less(fc, exc)) { |
539 | 539 |
_flow->set(e, (*_flow)[e] + exc); |
540 |
|
|
540 |
(*_excess)[v] += exc; |
|
541 | 541 |
if(!_level->active(v) && _tol.positive((*_excess)[v])) |
542 | 542 |
_level->activate(v); |
543 |
|
|
543 |
(*_excess)[act] = 0; |
|
544 | 544 |
_level->deactivate(act); |
545 | 545 |
goto next_l; |
546 | 546 |
} |
547 | 547 |
else { |
548 | 548 |
_flow->set(e, (*_up)[e]); |
549 |
|
|
549 |
(*_excess)[v] += fc; |
|
550 | 550 |
if(!_level->active(v) && _tol.positive((*_excess)[v])) |
551 | 551 |
_level->activate(v); |
552 | 552 |
exc-=fc; |
553 | 553 |
} |
554 | 554 |
} |
555 | 555 |
else if((*_level)[v]<mlevel) mlevel=(*_level)[v]; |
556 | 556 |
} |
557 | 557 |
for(InArcIt e(_g,act);e!=INVALID; ++e) { |
558 | 558 |
Node v = _g.source(e); |
559 | 559 |
Value fc=(*_flow)[e]-(*_lo)[e]; |
560 | 560 |
if(!_tol.positive(fc)) continue; |
561 | 561 |
if((*_level)[v]<actlevel) { |
562 | 562 |
if(!_tol.less(fc, exc)) { |
563 | 563 |
_flow->set(e, (*_flow)[e] - exc); |
564 |
|
|
564 |
(*_excess)[v] += exc; |
|
565 | 565 |
if(!_level->active(v) && _tol.positive((*_excess)[v])) |
566 | 566 |
_level->activate(v); |
567 |
|
|
567 |
(*_excess)[act] = 0; |
|
568 | 568 |
_level->deactivate(act); |
569 | 569 |
goto next_l; |
570 | 570 |
} |
571 | 571 |
else { |
572 | 572 |
_flow->set(e, (*_lo)[e]); |
573 |
|
|
573 |
(*_excess)[v] += fc; |
|
574 | 574 |
if(!_level->active(v) && _tol.positive((*_excess)[v])) |
575 | 575 |
_level->activate(v); |
576 | 576 |
exc-=fc; |
577 | 577 |
} |
578 | 578 |
} |
579 | 579 |
else if((*_level)[v]<mlevel) mlevel=(*_level)[v]; |
580 | 580 |
} |
581 | 581 |
|
582 |
_excess |
|
582 |
(*_excess)[act] = exc; |
|
583 | 583 |
if(!_tol.positive(exc)) _level->deactivate(act); |
584 | 584 |
else if(mlevel==_node_num) { |
585 | 585 |
_level->liftHighestActiveToTop(); |
586 | 586 |
_el = _node_num; |
587 | 587 |
return false; |
588 | 588 |
} |
589 | 589 |
else { |
590 | 590 |
_level->liftHighestActive(mlevel+1); |
591 | 591 |
if(_level->onLevel(actlevel)==0) { |
592 | 592 |
_el = actlevel; |
593 | 593 |
return false; |
594 | 594 |
} |
595 | 595 |
} |
596 | 596 |
next_l: |
597 | 597 |
; |
598 | 598 |
} |
599 | 599 |
return true; |
600 | 600 |
} |
601 | 601 |
|
602 | 602 |
/// Runs the algorithm. |
603 | 603 |
|
604 | 604 |
/// This function runs the algorithm. |
605 | 605 |
/// |
606 | 606 |
/// \return \c true if a feasible circulation is found. |
607 | 607 |
/// |
608 | 608 |
/// \note Apart from the return value, c.run() is just a shortcut for |
609 | 609 |
/// the following code. |
610 | 610 |
/// \code |
611 | 611 |
/// c.greedyInit(); |
612 | 612 |
/// c.start(); |
613 | 613 |
/// \endcode |
614 | 614 |
bool run() { |
615 | 615 |
greedyInit(); |
616 | 616 |
return start(); |
617 | 617 |
} |
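A short usage note on the shortcut above: if the flow is needed in a caller-owned map, \c flowMap(FlowMap&) can be chained with \c run(), since the setters return <tt>(*this)</tt>. This is a sketch under the same assumptions as the previous example (graph \c g and a constructed \c circ object).
\code
// Hedged sketch: let run() write into a caller-owned flow map
// instead of the automatically allocated one (see flowMap(FlowMap&) above).
ListDigraph::ArcMap<int> flow(g);
bool feasible = circ.flowMap(flow).run();  // equivalent to greedyInit() + start()
if (!feasible) {
  // no feasible circulation exists; see barrier() / barrierMap() below
}
\endcode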
618 | 618 |
|
619 | 619 |
/// @} |
620 | 620 |
|
621 | 621 |
/// \name Query Functions |
622 | 622 |
/// The results of the circulation algorithm can be obtained using |
623 | 623 |
/// these functions.\n |
624 | 624 |
/// Either \ref run() or \ref start() should be called before |
625 | 625 |
/// using them. |
626 | 626 |
|
627 | 627 |
///@{ |
628 | 628 |
|
629 | 629 |
/// \brief Returns the flow on the given arc. |
630 | 630 |
/// |
631 | 631 |
/// Returns the flow on the given arc. |
632 | 632 |
/// |
633 | 633 |
/// \pre Either \ref run() or \ref init() must be called before |
634 | 634 |
/// using this function. |
635 | 635 |
Value flow(const Arc& arc) const { |
636 | 636 |
return (*_flow)[arc]; |
637 | 637 |
} |
638 | 638 |
|
639 | 639 |
/// \brief Returns a const reference to the flow map. |
640 | 640 |
/// |
641 | 641 |
/// Returns a const reference to the arc map storing the found flow. |
642 | 642 |
/// |
643 | 643 |
/// \pre Either \ref run() or \ref init() must be called before |
644 | 644 |
/// using this function. |
645 | 645 |
const FlowMap& flowMap() const { |
646 | 646 |
return *_flow; |
647 | 647 |
} |
648 | 648 |
|
649 | 649 |
/** |
650 | 650 |
\brief Returns \c true if the given node is in a barrier. |
651 | 651 |
|
652 | 652 |
Barrier is a set \e B of nodes for which |
653 | 653 |
|
654 | 654 |
\f[ \sum_{a\in\delta_{out}(B)} upper(a) - |
655 | 655 |
\sum_{a\in\delta_{in}(B)} lower(a) < \sum_{v\in B}delta(v) \f] |
656 | 656 |
|
657 | 657 |
holds. The existence of a set with this property proves that a |
658 | 658 |
feasible circulation cannot exist. |
659 | 659 |
|
660 | 660 |
This function returns \c true if the given node is in the found |
661 | 661 |
barrier. If a feasible circulation is found, the function |
662 | 662 |
gives back \c false for every node. |
663 | 663 |
|
664 | 664 |
\pre Either \ref run() or \ref init() must be called before |
665 | 665 |
using this function. |
666 | 666 |
|
667 | 667 |
\sa barrierMap() |
668 | 668 |
\sa checkBarrier() |
669 | 669 |
*/ |
670 | 670 |
bool barrier(const Node& node) const |
671 | 671 |
{ |
672 | 672 |
return (*_level)[node] >= _el; |
673 | 673 |
} |
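When \c start() or \c run() returns \c false, the inequality above can be re-checked by hand from the barrier reported by this function. A hedged sketch using the same maps the algorithm was constructed with (this essentially mirrors what \c checkBarrier() is for):
\code
// Hedged sketch: evaluate both sides of the barrier inequality for the
// node set B = { v : circ.barrier(v) }.
int lhs = 0, rhs = 0;
for (ListDigraph::ArcIt a(g); a != INVALID; ++a) {
  bool s_in = circ.barrier(g.source(a));
  bool t_in = circ.barrier(g.target(a));
  if (s_in && !t_in) lhs += up[a];   // arc leaving B: add its upper bound
  if (!s_in && t_in) lhs -= lo[a];   // arc entering B: subtract its lower bound
}
for (ListDigraph::NodeIt n(g); n != INVALID; ++n)
  if (circ.barrier(n)) rhs += delta[n];
// for a genuine barrier, lhs < rhs must hold
\endcode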
674 | 674 |
|
675 | 675 |
/// \brief Gives back a barrier. |
676 | 676 |
/// |
677 | 677 |
/// This function sets \c bar to the characteristic vector of the |
678 | 678 |
/// found barrier. \c bar should be a \ref concepts::WriteMap "writable" |
@@ -1222,369 +1222,369 @@
1222 | 1222 |
public: |
1223 | 1223 |
typedef typename ItemSetTraits<GR, typename GR::Arc> |
1224 | 1224 |
::ItemNotifier::ObserverBase Parent; |
1225 | 1225 |
|
1226 | 1226 |
TEMPLATE_DIGRAPH_TYPEDEFS(GR); |
1227 | 1227 |
typedef GR Digraph; |
1228 | 1228 |
|
1229 | 1229 |
protected: |
1230 | 1230 |
|
1231 | 1231 |
class AutoNodeMap : public ItemSetTraits<GR, Node>::template Map<Arc>::Type { |
1232 | 1232 |
public: |
1233 | 1233 |
|
1234 | 1234 |
typedef typename ItemSetTraits<GR, Node>::template Map<Arc>::Type Parent; |
1235 | 1235 |
|
1236 | 1236 |
AutoNodeMap(const GR& digraph) : Parent(digraph, INVALID) {} |
1237 | 1237 |
|
1238 | 1238 |
virtual void add(const Node& node) { |
1239 | 1239 |
Parent::add(node); |
1240 | 1240 |
Parent::set(node, INVALID); |
1241 | 1241 |
} |
1242 | 1242 |
|
1243 | 1243 |
virtual void add(const std::vector<Node>& nodes) { |
1244 | 1244 |
Parent::add(nodes); |
1245 | 1245 |
for (int i = 0; i < int(nodes.size()); ++i) { |
1246 | 1246 |
Parent::set(nodes[i], INVALID); |
1247 | 1247 |
} |
1248 | 1248 |
} |
1249 | 1249 |
|
1250 | 1250 |
virtual void build() { |
1251 | 1251 |
Parent::build(); |
1252 | 1252 |
Node it; |
1253 | 1253 |
typename Parent::Notifier* nf = Parent::notifier(); |
1254 | 1254 |
for (nf->first(it); it != INVALID; nf->next(it)) { |
1255 | 1255 |
Parent::set(it, INVALID); |
1256 | 1256 |
} |
1257 | 1257 |
} |
1258 | 1258 |
}; |
1259 | 1259 |
|
1260 | 1260 |
const Digraph &_g; |
1261 | 1261 |
AutoNodeMap _head; |
1262 | 1262 |
typename Digraph::template ArcMap<Arc> _parent; |
1263 | 1263 |
typename Digraph::template ArcMap<Arc> _left; |
1264 | 1264 |
typename Digraph::template ArcMap<Arc> _right; |
1265 | 1265 |
|
1266 | 1266 |
class ArcLess { |
1267 | 1267 |
const Digraph &g; |
1268 | 1268 |
public: |
1269 | 1269 |
ArcLess(const Digraph &_g) : g(_g) {} |
1270 | 1270 |
bool operator()(Arc a,Arc b) const |
1271 | 1271 |
{ |
1272 | 1272 |
return g.target(a)<g.target(b); |
1273 | 1273 |
} |
1274 | 1274 |
}; |
1275 | 1275 |
|
1276 | 1276 |
public: |
1277 | 1277 |
|
1278 | 1278 |
///Constructor |
1279 | 1279 |
|
1280 | 1280 |
///Constructor. |
1281 | 1281 |
/// |
1282 | 1282 |
///It builds up the search database. |
1283 | 1283 |
DynArcLookUp(const Digraph &g) |
1284 | 1284 |
: _g(g),_head(g),_parent(g),_left(g),_right(g) |
1285 | 1285 |
{ |
1286 | 1286 |
Parent::attach(_g.notifier(typename Digraph::Arc())); |
1287 | 1287 |
refresh(); |
1288 | 1288 |
} |
1289 | 1289 |
|
1290 | 1290 |
protected: |
1291 | 1291 |
|
1292 | 1292 |
virtual void add(const Arc& arc) { |
1293 | 1293 |
insert(arc); |
1294 | 1294 |
} |
1295 | 1295 |
|
1296 | 1296 |
virtual void add(const std::vector<Arc>& arcs) { |
1297 | 1297 |
for (int i = 0; i < int(arcs.size()); ++i) { |
1298 | 1298 |
insert(arcs[i]); |
1299 | 1299 |
} |
1300 | 1300 |
} |
1301 | 1301 |
|
1302 | 1302 |
virtual void erase(const Arc& arc) { |
1303 | 1303 |
remove(arc); |
1304 | 1304 |
} |
1305 | 1305 |
|
1306 | 1306 |
virtual void erase(const std::vector<Arc>& arcs) { |
1307 | 1307 |
for (int i = 0; i < int(arcs.size()); ++i) { |
1308 | 1308 |
remove(arcs[i]); |
1309 | 1309 |
} |
1310 | 1310 |
} |
1311 | 1311 |
|
1312 | 1312 |
virtual void build() { |
1313 | 1313 |
refresh(); |
1314 | 1314 |
} |
1315 | 1315 |
|
1316 | 1316 |
virtual void clear() { |
1317 | 1317 |
for(NodeIt n(_g);n!=INVALID;++n) { |
1318 |
_head |
|
1318 |
_head[n] = INVALID; |
|
1319 | 1319 |
} |
1320 | 1320 |
} |
1321 | 1321 |
|
1322 | 1322 |
void insert(Arc arc) { |
1323 | 1323 |
Node s = _g.source(arc); |
1324 | 1324 |
Node t = _g.target(arc); |
1325 |
_left.set(arc, INVALID); |
|
1326 |
_right.set(arc, INVALID); |
|
1325 |
_left[arc] = INVALID; |
|
1326 |
_right[arc] = INVALID; |
|
1327 | 1327 |
|
1328 | 1328 |
Arc e = _head[s]; |
1329 | 1329 |
if (e == INVALID) { |
1330 |
_head.set(s, arc); |
|
1331 |
_parent.set(arc, INVALID); |
|
1330 |
_head[s] = arc; |
|
1331 |
_parent[arc] = INVALID; |
|
1332 | 1332 |
return; |
1333 | 1333 |
} |
1334 | 1334 |
while (true) { |
1335 | 1335 |
if (t < _g.target(e)) { |
1336 | 1336 |
if (_left[e] == INVALID) { |
1337 |
_left.set(e, arc); |
|
1338 |
_parent.set(arc, e); |
|
1337 |
_left[e] = arc; |
|
1338 |
_parent[arc] = e; |
|
1339 | 1339 |
splay(arc); |
1340 | 1340 |
return; |
1341 | 1341 |
} else { |
1342 | 1342 |
e = _left[e]; |
1343 | 1343 |
} |
1344 | 1344 |
} else { |
1345 | 1345 |
if (_right[e] == INVALID) { |
1346 |
_right.set(e, arc); |
|
1347 |
_parent.set(arc, e); |
|
1346 |
_right[e] = arc; |
|
1347 |
_parent[arc] = e; |
|
1348 | 1348 |
splay(arc); |
1349 | 1349 |
return; |
1350 | 1350 |
} else { |
1351 | 1351 |
e = _right[e]; |
1352 | 1352 |
} |
1353 | 1353 |
} |
1354 | 1354 |
} |
1355 | 1355 |
} |
1356 | 1356 |
|
1357 | 1357 |
void remove(Arc arc) { |
1358 | 1358 |
if (_left[arc] == INVALID) { |
1359 | 1359 |
if (_right[arc] != INVALID) { |
1360 |
_parent |
|
1360 |
_parent[_right[arc]] = _parent[arc]; |
|
1361 | 1361 |
} |
1362 | 1362 |
if (_parent[arc] != INVALID) { |
1363 | 1363 |
if (_left[_parent[arc]] == arc) { |
1364 |
_left |
|
1364 |
_left[_parent[arc]] = _right[arc]; |
|
1365 | 1365 |
} else { |
1366 |
_right |
|
1366 |
_right[_parent[arc]] = _right[arc]; |
|
1367 | 1367 |
} |
1368 | 1368 |
} else { |
1369 |
_head |
|
1369 |
_head[_g.source(arc)] = _right[arc]; |
|
1370 | 1370 |
} |
1371 | 1371 |
} else if (_right[arc] == INVALID) { |
1372 |
_parent |
|
1372 |
_parent[_left[arc]] = _parent[arc]; |
|
1373 | 1373 |
if (_parent[arc] != INVALID) { |
1374 | 1374 |
if (_left[_parent[arc]] == arc) { |
1375 |
_left |
|
1375 |
_left[_parent[arc]] = _left[arc]; |
|
1376 | 1376 |
} else { |
1377 |
_right |
|
1377 |
_right[_parent[arc]] = _left[arc]; |
|
1378 | 1378 |
} |
1379 | 1379 |
} else { |
1380 |
_head |
|
1380 |
_head[_g.source(arc)] = _left[arc]; |
|
1381 | 1381 |
} |
1382 | 1382 |
} else { |
1383 | 1383 |
Arc e = _left[arc]; |
1384 | 1384 |
if (_right[e] != INVALID) { |
1385 | 1385 |
e = _right[e]; |
1386 | 1386 |
while (_right[e] != INVALID) { |
1387 | 1387 |
e = _right[e]; |
1388 | 1388 |
} |
1389 | 1389 |
Arc s = _parent[e]; |
1390 |
_right |
|
1390 |
_right[_parent[e]] = _left[e]; |
|
1391 | 1391 |
if (_left[e] != INVALID) { |
1392 |
_parent |
|
1392 |
_parent[_left[e]] = _parent[e]; |
|
1393 | 1393 |
} |
1394 | 1394 |
|
1395 |
_left.set(e, _left[arc]); |
|
1396 |
_parent.set(_left[arc], e); |
|
1397 |
_right.set(e, _right[arc]); |
|
1398 |
_parent.set(_right[arc], e); |
|
1395 |
_left[e] = _left[arc]; |
|
1396 |
_parent[_left[arc]] = e; |
|
1397 |
_right[e] = _right[arc]; |
|
1398 |
_parent[_right[arc]] = e; |
|
1399 | 1399 |
|
1400 |
_parent |
|
1400 |
_parent[e] = _parent[arc]; |
|
1401 | 1401 |
if (_parent[arc] != INVALID) { |
1402 | 1402 |
if (_left[_parent[arc]] == arc) { |
1403 |
_left |
|
1403 |
_left[_parent[arc]] = e; |
|
1404 | 1404 |
} else { |
1405 |
_right |
|
1405 |
_right[_parent[arc]] = e; |
|
1406 | 1406 |
} |
1407 | 1407 |
} |
1408 | 1408 |
splay(s); |
1409 | 1409 |
} else { |
1410 |
_right.set(e, _right[arc]); |
|
1411 |
_parent.set(_right[arc], e); |
|
1412 |
|
|
1410 |
_right[e] = _right[arc]; |
|
1411 |
_parent[_right[arc]] = e; |
|
1412 |
_parent[e] = _parent[arc]; |
|
1413 | 1413 |
|
1414 | 1414 |
if (_parent[arc] != INVALID) { |
1415 | 1415 |
if (_left[_parent[arc]] == arc) { |
1416 |
_left |
|
1416 |
_left[_parent[arc]] = e; |
|
1417 | 1417 |
} else { |
1418 |
_right |
|
1418 |
_right[_parent[arc]] = e; |
|
1419 | 1419 |
} |
1420 | 1420 |
} else { |
1421 |
_head |
|
1421 |
_head[_g.source(arc)] = e; |
|
1422 | 1422 |
} |
1423 | 1423 |
} |
1424 | 1424 |
} |
1425 | 1425 |
} |
1426 | 1426 |
|
1427 | 1427 |
Arc refreshRec(std::vector<Arc> &v,int a,int b) |
1428 | 1428 |
{ |
1429 | 1429 |
int m=(a+b)/2; |
1430 | 1430 |
Arc me=v[m]; |
1431 | 1431 |
if (a < m) { |
1432 | 1432 |
Arc left = refreshRec(v,a,m-1); |
1433 |
_left.set(me, left); |
|
1434 |
_parent.set(left, me); |
|
1433 |
_left[me] = left; |
|
1434 |
_parent[left] = me; |
|
1435 | 1435 |
} else { |
1436 |
_left |
|
1436 |
_left[me] = INVALID; |
|
1437 | 1437 |
} |
1438 | 1438 |
if (m < b) { |
1439 | 1439 |
Arc right = refreshRec(v,m+1,b); |
1440 |
_right.set(me, right); |
|
1441 |
_parent.set(right, me); |
|
1440 |
_right[me] = right; |
|
1441 |
_parent[right] = me; |
|
1442 | 1442 |
} else { |
1443 |
_right |
|
1443 |
_right[me] = INVALID; |
|
1444 | 1444 |
} |
1445 | 1445 |
return me; |
1446 | 1446 |
} |
1447 | 1447 |
|
1448 | 1448 |
void refresh() { |
1449 | 1449 |
for(NodeIt n(_g);n!=INVALID;++n) { |
1450 | 1450 |
std::vector<Arc> v; |
1451 | 1451 |
for(OutArcIt a(_g,n);a!=INVALID;++a) v.push_back(a); |
1452 | 1452 |
if (!v.empty()) { |
1453 | 1453 |
std::sort(v.begin(),v.end(),ArcLess(_g)); |
1454 | 1454 |
Arc head = refreshRec(v,0,v.size()-1); |
1455 |
_head.set(n, head); |
|
1456 |
_parent.set(head, INVALID); |
|
1455 |
_head[n] = head; |
|
1456 |
_parent[head] = INVALID; |
|
1457 | 1457 |
} |
1458 |
else _head |
|
1458 |
else _head[n] = INVALID; |
|
1459 | 1459 |
} |
1460 | 1460 |
} |
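refreshRec() above always takes the median of the target-sorted arc vector as the root of the subtree, so refresh() rebuilds each per-node search tree with logarithmic depth. The standalone sketch below shows the same median-split pattern on a plain sorted vector of keys; the Node struct and buildBalanced() are purely illustrative and not part of LEMON.
\code
// Illustrative only: the median-split construction used by refreshRec(),
// applied to a sorted std::vector<int> of keys.
struct Node { int key; Node *left; Node *right; };

Node* buildBalanced(const std::vector<int>& v, int a, int b) {
  if (a > b) return 0;
  int m = (a + b) / 2;              // median element becomes the subtree root
  Node* me = new Node;
  me->key = v[m];
  me->left = buildBalanced(v, a, m - 1);
  me->right = buildBalanced(v, m + 1, b);
  return me;
}
// buildBalanced(v, 0, int(v.size()) - 1) yields a tree of depth O(log n).
\endcode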
1461 | 1461 |
|
1462 | 1462 |
void zig(Arc v) { |
1463 | 1463 |
Arc w = _parent[v]; |
1464 |
_parent.set(v, _parent[w]); |
|
1465 |
_parent.set(w, v); |
|
1466 |
_left.set(w, _right[v]); |
|
1467 |
_right.set(v, w); |
|
1464 |
_parent[v] = _parent[w]; |
|
1465 |
_parent[w] = v; |
|
1466 |
_left[w] = _right[v]; |
|
1467 |
_right[v] = w; |
|
1468 | 1468 |
if (_parent[v] != INVALID) { |
1469 | 1469 |
if (_right[_parent[v]] == w) { |
1470 |
_right |
|
1470 |
_right[_parent[v]] = v; |
|
1471 | 1471 |
} else { |
1472 |
_left |
|
1472 |
_left[_parent[v]] = v; |
|
1473 | 1473 |
} |
1474 | 1474 |
} |
1475 | 1475 |
if (_left[w] != INVALID){ |
1476 |
_parent |
|
1476 |
_parent[_left[w]] = w; |
|
1477 | 1477 |
} |
1478 | 1478 |
} |
1479 | 1479 |
|
1480 | 1480 |
void zag(Arc v) { |
1481 | 1481 |
Arc w = _parent[v]; |
1482 |
_parent.set(v, _parent[w]); |
|
1483 |
_parent.set(w, v); |
|
1484 |
_right.set(w, _left[v]); |
|
1485 |
_left.set(v, w); |
|
1482 |
_parent[v] = _parent[w]; |
|
1483 |
_parent[w] = v; |
|
1484 |
_right[w] = _left[v]; |
|
1485 |
_left[v] = w; |
|
1486 | 1486 |
if (_parent[v] != INVALID){ |
1487 | 1487 |
if (_left[_parent[v]] == w) { |
1488 |
_left |
|
1488 |
_left[_parent[v]] = v; |
|
1489 | 1489 |
} else { |
1490 |
_right |
|
1490 |
_right[_parent[v]] = v; |
|
1491 | 1491 |
} |
1492 | 1492 |
} |
1493 | 1493 |
if (_right[w] != INVALID){ |
1494 |
_parent |
|
1494 |
_parent[_right[w]] = w; |
|
1495 | 1495 |
} |
1496 | 1496 |
} |
1497 | 1497 |
|
1498 | 1498 |
void splay(Arc v) { |
1499 | 1499 |
while (_parent[v] != INVALID) { |
1500 | 1500 |
if (v == _left[_parent[v]]) { |
1501 | 1501 |
if (_parent[_parent[v]] == INVALID) { |
1502 | 1502 |
zig(v); |
1503 | 1503 |
} else { |
1504 | 1504 |
if (_parent[v] == _left[_parent[_parent[v]]]) { |
1505 | 1505 |
zig(_parent[v]); |
1506 | 1506 |
zig(v); |
1507 | 1507 |
} else { |
1508 | 1508 |
zig(v); |
1509 | 1509 |
zag(v); |
1510 | 1510 |
} |
1511 | 1511 |
} |
1512 | 1512 |
} else { |
1513 | 1513 |
if (_parent[_parent[v]] == INVALID) { |
1514 | 1514 |
zag(v); |
1515 | 1515 |
} else { |
1516 | 1516 |
if (_parent[v] == _left[_parent[_parent[v]]]) { |
1517 | 1517 |
zag(v); |
1518 | 1518 |
zig(v); |
1519 | 1519 |
} else { |
1520 | 1520 |
zag(_parent[v]); |
1521 | 1521 |
zag(v); |
1522 | 1522 |
} |
1523 | 1523 |
} |
1524 | 1524 |
} |
1525 | 1525 |
} |
1526 | 1526 |
_head[_g.source(v)] = v; |
1527 | 1527 |
} |
1528 | 1528 |
|
1529 | 1529 |
|
1530 | 1530 |
public: |
1531 | 1531 |
|
1532 | 1532 |
///Find an arc between two nodes. |
1533 | 1533 |
|
1534 | 1534 |
///Find an arc between two nodes. |
1535 | 1535 |
///\param s The source node. |
1536 | 1536 |
///\param t The target node. |
1537 | 1537 |
///\param p The previous arc between \c s and \c t. If it is INVALID or |
1538 | 1538 |
///not given, the operator finds the first appropriate arc. |
1539 | 1539 |
///\return An arc from \c s to \c t after \c p or |
1540 | 1540 |
///\ref INVALID if there is no more. |
1541 | 1541 |
/// |
1542 | 1542 |
///For example, you can count the number of arcs from \c u to \c v in the |
1543 | 1543 |
///following way. |
1544 | 1544 |
///\code |
1545 | 1545 |
///DynArcLookUp<ListDigraph> ae(g); |
1546 | 1546 |
///... |
1547 | 1547 |
///int n = 0; |
1548 | 1548 |
///for(Arc a = ae(u,v); a != INVALID; a = ae(u,v,a)) n++; |
1549 | 1549 |
///\endcode |
1550 | 1550 |
/// |
1551 | 1551 |
///Finding an arc takes at most <em>O</em>(log<em>d</em>) |
1552 | 1552 |
///amortized time; more precisely, the time complexity of the lookups |
1553 | 1553 |
///matches that of the optimal search tree implementation for the |
1554 | 1554 |
///current query distribution within a constant factor. |
1555 | 1555 |
/// |
1556 | 1556 |
///\note This is a dynamic data structure, therefore the data |
1557 | 1557 |
///structure is updated after each graph alteration. Thus although |
1558 | 1558 |
///this data structure is theoretically faster than \ref ArcLookUp |
1559 | 1559 |
///and \ref AllArcLookUp, it often provides worse performance than |
1560 | 1560 |
///them. |
1561 | 1561 |
Arc operator()(Node s, Node t, Arc p = INVALID) const { |
1562 | 1562 |
if (p == INVALID) { |
1563 | 1563 |
Arc a = _head[s]; |
1564 | 1564 |
if (a == INVALID) return INVALID; |
1565 | 1565 |
Arc r = INVALID; |
1566 | 1566 |
while (true) { |
1567 | 1567 |
if (_g.target(a) < t) { |
1568 | 1568 |
if (_right[a] == INVALID) { |
1569 | 1569 |
const_cast<DynArcLookUp&>(*this).splay(a); |
1570 | 1570 |
return r; |
1571 | 1571 |
} else { |
1572 | 1572 |
a = _right[a]; |
1573 | 1573 |
} |
1574 | 1574 |
} else { |
1575 | 1575 |
if (_g.target(a) == t) { |
1576 | 1576 |
r = a; |
1577 | 1577 |
} |
1578 | 1578 |
if (_left[a] == INVALID) { |
1579 | 1579 |
const_cast<DynArcLookUp&>(*this).splay(a); |
1580 | 1580 |
return r; |
1581 | 1581 |
} else { |
1582 | 1582 |
a = _left[a]; |
1583 | 1583 |
} |
1584 | 1584 |
} |
1585 | 1585 |
} |
1586 | 1586 |
} else { |
1587 | 1587 |
Arc a = p; |
1588 | 1588 |
if (_right[a] != INVALID) { |
1589 | 1589 |
a = _right[a]; |
1590 | 1590 |
while (_left[a] != INVALID) { |
1 | 1 |
/* -*- mode: C++; indent-tabs-mode: nil; -*- |
2 | 2 |
* |
3 | 3 |
* This file is a part of LEMON, a generic C++ optimization library. |
4 | 4 |
* |
5 | 5 |
* Copyright (C) 2003-2009 |
6 | 6 |
* Egervary Jeno Kombinatorikus Optimalizalasi Kutatocsoport |
7 | 7 |
* (Egervary Research Group on Combinatorial Optimization, EGRES). |
8 | 8 |
* |
9 | 9 |
* Permission to use, modify and distribute this software is granted |
10 | 10 |
* provided that this copyright notice appears in all copies. For |
11 | 11 |
* precise terms see the accompanying LICENSE file. |
12 | 12 |
* |
13 | 13 |
* This software is provided "AS IS" with no warranty of any kind, |
14 | 14 |
* express or implied, and with no claim as to its suitability for any |
15 | 15 |
* purpose. |
16 | 16 |
* |
17 | 17 |
*/ |
18 | 18 |
|
19 | 19 |
#ifndef LEMON_ELEVATOR_H |
20 | 20 |
#define LEMON_ELEVATOR_H |
21 | 21 |
|
22 | 22 |
///\ingroup auxdat |
23 | 23 |
///\file |
24 | 24 |
///\brief Elevator class |
25 | 25 |
/// |
26 | 26 |
///Elevator class implements an efficient data structure |
27 | 27 |
///for labeling items in push-relabel type algorithms. |
28 | 28 |
/// |
29 | 29 |
|
30 | 30 |
#include <lemon/core.h> |
31 | 31 |
#include <lemon/bits/traits.h> |
32 | 32 |
|
33 | 33 |
namespace lemon { |
34 | 34 |
|
35 | 35 |
///Class for handling "labels" in push-relabel type algorithms. |
36 | 36 |
|
37 | 37 |
///A class for handling "labels" in push-relabel type algorithms. |
38 | 38 |
/// |
39 | 39 |
///\ingroup auxdat |
40 | 40 |
///Using this class you can assign "labels" (nonnegative integer numbers) |
41 | 41 |
///to the edges or nodes of a graph, manipulate and query them through |
42 | 42 |
///operations typically arising in "push-relabel" type algorithms. |
43 | 43 |
/// |
44 | 44 |
///Each item is either \em active or not, and you can also choose a |
45 | 45 |
///highest level active item. |
46 | 46 |
/// |
47 | 47 |
///\sa LinkedElevator |
48 | 48 |
/// |
49 | 49 |
///\param GR Type of the underlying graph. |
50 | 50 |
///\param Item Type of the items the data is assigned to (\c GR::Node, |
51 | 51 |
///\c GR::Arc or \c GR::Edge). |
52 | 52 |
template<class GR, class Item> |
53 | 53 |
class Elevator |
54 | 54 |
{ |
55 | 55 |
public: |
56 | 56 |
|
57 | 57 |
typedef Item Key; |
58 | 58 |
typedef int Value; |
59 | 59 |
|
60 | 60 |
private: |
61 | 61 |
|
62 | 62 |
typedef Item *Vit; |
63 | 63 |
typedef typename ItemSetTraits<GR,Item>::template Map<Vit>::Type VitMap; |
64 | 64 |
typedef typename ItemSetTraits<GR,Item>::template Map<int>::Type IntMap; |
65 | 65 |
|
66 | 66 |
const GR &_g; |
67 | 67 |
int _max_level; |
68 | 68 |
int _item_num; |
69 | 69 |
VitMap _where; |
70 | 70 |
IntMap _level; |
71 | 71 |
std::vector<Item> _items; |
72 | 72 |
std::vector<Vit> _first; |
73 | 73 |
std::vector<Vit> _last_active; |
74 | 74 |
|
75 | 75 |
int _highest_active; |
76 | 76 |
|
77 | 77 |
void copy(Item i, Vit p) |
78 | 78 |
{ |
79 |
_where |
|
79 |
_where[*p=i] = p; |
|
80 | 80 |
} |
81 | 81 |
void copy(Vit s, Vit p) |
82 | 82 |
{ |
83 | 83 |
if(s!=p) |
84 | 84 |
{ |
85 | 85 |
Item i=*s; |
86 | 86 |
*p=i; |
87 |
_where |
|
87 |
_where[i] = p; |
|
88 | 88 |
} |
89 | 89 |
} |
90 | 90 |
void swap(Vit i, Vit j) |
91 | 91 |
{ |
92 | 92 |
Item ti=*i; |
93 | 93 |
Vit ct = _where[ti]; |
94 |
_where.set(ti,_where[*i=*j]); |
|
95 |
_where.set(*j,ct); |
|
94 |
_where[ti] = _where[*i=*j]; |
|
95 |
_where[*j] = ct; |
|
96 | 96 |
*j=ti; |
97 | 97 |
} |
98 | 98 |
|
99 | 99 |
public: |
100 | 100 |
|
101 | 101 |
///Constructor with given maximum level. |
102 | 102 |
|
103 | 103 |
///Constructor with given maximum level. |
104 | 104 |
/// |
105 | 105 |
///\param graph The underlying graph. |
106 | 106 |
///\param max_level The maximum allowed level. |
107 | 107 |
///Set the range of the possible labels to <tt>[0..max_level]</tt>. |
108 | 108 |
Elevator(const GR &graph,int max_level) : |
109 | 109 |
_g(graph), |
110 | 110 |
_max_level(max_level), |
111 | 111 |
_item_num(_max_level), |
112 | 112 |
_where(graph), |
113 | 113 |
_level(graph,0), |
114 | 114 |
_items(_max_level), |
115 | 115 |
_first(_max_level+2), |
116 | 116 |
_last_active(_max_level+2), |
117 | 117 |
_highest_active(-1) {} |
118 | 118 |
///Constructor. |
119 | 119 |
|
120 | 120 |
///Constructor. |
121 | 121 |
/// |
122 | 122 |
///\param graph The underlying graph. |
123 | 123 |
///Set the range of the possible labels to <tt>[0..max_level]</tt>, |
124 | 124 |
///where \c max_level is equal to the number of labeled items in the graph. |
125 | 125 |
Elevator(const GR &graph) : |
126 | 126 |
_g(graph), |
127 | 127 |
_max_level(countItems<GR, Item>(graph)), |
128 | 128 |
_item_num(_max_level), |
129 | 129 |
_where(graph), |
130 | 130 |
_level(graph,0), |
131 | 131 |
_items(_max_level), |
132 | 132 |
_first(_max_level+2), |
133 | 133 |
_last_active(_max_level+2), |
134 | 134 |
_highest_active(-1) |
135 | 135 |
{ |
136 | 136 |
} |
137 | 137 |
|
138 | 138 |
///Activate item \c i. |
139 | 139 |
|
140 | 140 |
///Activate item \c i. |
141 | 141 |
///\pre Item \c i shouldn't be active before. |
142 | 142 |
void activate(Item i) |
143 | 143 |
{ |
144 | 144 |
const int l=_level[i]; |
145 | 145 |
swap(_where[i],++_last_active[l]); |
146 | 146 |
if(l>_highest_active) _highest_active=l; |
147 | 147 |
} |
148 | 148 |
|
149 | 149 |
///Deactivate item \c i. |
150 | 150 |
|
151 | 151 |
///Deactivate item \c i. |
152 | 152 |
///\pre Item \c i must be active before. |
153 | 153 |
void deactivate(Item i) |
154 | 154 |
{ |
155 | 155 |
swap(_where[i],_last_active[_level[i]]--); |
156 | 156 |
while(_highest_active>=0 && |
157 | 157 |
_last_active[_highest_active]<_first[_highest_active]) |
158 | 158 |
_highest_active--; |
159 | 159 |
} |
160 | 160 |
|
161 | 161 |
///Query whether item \c i is active |
162 | 162 |
bool active(Item i) const { return _where[i]<=_last_active[_level[i]]; } |
163 | 163 |
|
164 | 164 |
///Return the level of item \c i. |
165 | 165 |
int operator[](Item i) const { return _level[i]; } |
166 | 166 |
|
167 | 167 |
///Return the number of items on level \c l. |
168 | 168 |
int onLevel(int l) const |
169 | 169 |
{ |
170 | 170 |
return _first[l+1]-_first[l]; |
171 | 171 |
} |
172 | 172 |
///Return true if level \c l is empty. |
173 | 173 |
bool emptyLevel(int l) const |
174 | 174 |
{ |
175 | 175 |
return _first[l+1]-_first[l]==0; |
176 | 176 |
} |
177 | 177 |
///Return the number of items above level \c l. |
178 | 178 |
int aboveLevel(int l) const |
179 | 179 |
{ |
180 | 180 |
return _first[_max_level+1]-_first[l+1]; |
181 | 181 |
} |
182 | 182 |
///Return the number of active items on level \c l. |
183 | 183 |
int activesOnLevel(int l) const |
184 | 184 |
{ |
185 | 185 |
return _last_active[l]-_first[l]+1; |
186 | 186 |
} |
187 | 187 |
///Return true if there is no active item on level \c l. |
188 | 188 |
bool activeFree(int l) const |
189 | 189 |
{ |
190 | 190 |
return _last_active[l]<_first[l]; |
191 | 191 |
} |
192 | 192 |
///Return the maximum allowed level. |
193 | 193 |
int maxLevel() const |
194 | 194 |
{ |
195 | 195 |
return _max_level; |
196 | 196 |
} |
197 | 197 |
|
198 | 198 |
///\name Highest Active Item |
199 | 199 |
///Functions for working with the highest level |
200 | 200 |
///active item. |
201 | 201 |
|
202 | 202 |
///@{ |
203 | 203 |
|
204 | 204 |
///Return a highest level active item. |
205 | 205 |
|
206 | 206 |
///Return a highest level active item or INVALID if there is no active |
207 | 207 |
///item. |
208 | 208 |
Item highestActive() const |
209 | 209 |
{ |
210 | 210 |
return _highest_active>=0?*_last_active[_highest_active]:INVALID; |
211 | 211 |
} |
212 | 212 |
|
213 | 213 |
///Return the highest active level. |
214 | 214 |
|
215 | 215 |
///Return the level of the highest active item or -1 if there is no active |
216 | 216 |
///item. |
217 | 217 |
int highestActiveLevel() const |
218 | 218 |
{ |
219 | 219 |
return _highest_active; |
220 | 220 |
} |
221 | 221 |
|
222 | 222 |
///Lift the highest active item by one. |
223 | 223 |
|
224 | 224 |
///Lift the item returned by highestActive() by one. |
225 | 225 |
/// |
226 | 226 |
void liftHighestActive() |
227 | 227 |
{ |
228 | 228 |
Item it = *_last_active[_highest_active]; |
229 |
|
|
229 |
++_level[it]; |
|
230 | 230 |
swap(_last_active[_highest_active]--,_last_active[_highest_active+1]); |
231 | 231 |
--_first[++_highest_active]; |
232 | 232 |
} |
233 | 233 |
|
234 | 234 |
///Lift the highest active item to the given level. |
235 | 235 |
|
236 | 236 |
///Lift the item returned by highestActive() to level \c new_level. |
237 | 237 |
/// |
238 | 238 |
///\warning \c new_level must be strictly higher |
239 | 239 |
///than the current level. |
240 | 240 |
/// |
241 | 241 |
void liftHighestActive(int new_level) |
242 | 242 |
{ |
243 | 243 |
const Item li = *_last_active[_highest_active]; |
244 | 244 |
|
245 | 245 |
copy(--_first[_highest_active+1],_last_active[_highest_active]--); |
246 | 246 |
for(int l=_highest_active+1;l<new_level;l++) |
247 | 247 |
{ |
248 | 248 |
copy(--_first[l+1],_first[l]); |
249 | 249 |
--_last_active[l]; |
250 | 250 |
} |
251 | 251 |
copy(li,_first[new_level]); |
252 |
_level |
|
252 |
_level[li] = new_level; |
|
253 | 253 |
_highest_active=new_level; |
254 | 254 |
} |
255 | 255 |
|
256 | 256 |
///Lift the highest active item to the top level. |
257 | 257 |
|
258 | 258 |
///Lift the item returned by highestActive() to the top level and |
259 | 259 |
///deactivate it. |
260 | 260 |
void liftHighestActiveToTop() |
261 | 261 |
{ |
262 | 262 |
const Item li = *_last_active[_highest_active]; |
263 | 263 |
|
264 | 264 |
copy(--_first[_highest_active+1],_last_active[_highest_active]--); |
265 | 265 |
for(int l=_highest_active+1;l<_max_level;l++) |
266 | 266 |
{ |
267 | 267 |
copy(--_first[l+1],_first[l]); |
268 | 268 |
--_last_active[l]; |
269 | 269 |
} |
270 | 270 |
copy(li,_first[_max_level]); |
271 | 271 |
--_last_active[_max_level]; |
272 |
_level |
|
272 |
_level[li] = _max_level; |
|
273 | 273 |
|
274 | 274 |
while(_highest_active>=0 && |
275 | 275 |
_last_active[_highest_active]<_first[_highest_active]) |
276 | 276 |
_highest_active--; |
277 | 277 |
} |
278 | 278 |
|
279 | 279 |
///@} |
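The functions in this group are meant to drive the main loop of a highest-label push-relabel implementation; a hedged sketch of that loop is given below. Here \c elev is an Elevator over the nodes of \c g, and \c discharge() is a placeholder for the algorithm-specific push step, not a LEMON function.
\code
// Hedged sketch: generic highest-label main loop
// (compare Circulation::start() earlier in this changeset).
ListDigraph::Node n;
while ((n = elev.highestActive()) != INVALID) {
  if (discharge(n)) {          // placeholder: all excess pushed away
    elev.deactivate(n);
  } else {                     // no admissible arc left: relabel
    elev.liftHighestActive();  // or liftHighestActive(new_level)
  }
}
\endcode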
280 | 280 |
|
281 | 281 |
///\name Active Item on Certain Level |
282 | 282 |
///Functions for working with the active items. |
283 | 283 |
|
284 | 284 |
///@{ |
285 | 285 |
|
286 | 286 |
///Return an active item on level \c l. |
287 | 287 |
|
288 | 288 |
///Return an active item on level \c l or \ref INVALID if there is no such |
289 | 289 |
///item. (\c l must be from the range [0...\c max_level].) |
290 | 290 |
Item activeOn(int l) const |
291 | 291 |
{ |
292 | 292 |
return _last_active[l]>=_first[l]?*_last_active[l]:INVALID; |
293 | 293 |
} |
294 | 294 |
|
295 | 295 |
///Lift the active item returned by \c activeOn(level) by one. |
296 | 296 |
|
297 | 297 |
///Lift the active item returned by \ref activeOn() "activeOn(level)" |
298 | 298 |
///by one. |
299 | 299 |
void liftActiveOn(int level) |
300 | 300 |
{ |
301 | 301 |
Item it =*_last_active[level]; |
302 |
|
|
302 |
++_level[it]; |
|
303 | 303 |
swap(_last_active[level]--, --_first[level+1]); |
304 | 304 |
if (level+1>_highest_active) ++_highest_active; |
305 | 305 |
} |
306 | 306 |
|
307 | 307 |
///Lift the active item returned by \c activeOn(level) to the given level. |
308 | 308 |
|
309 | 309 |
///Lift the active item returned by \ref activeOn() "activeOn(level)" |
310 | 310 |
///to the given level. |
311 | 311 |
void liftActiveOn(int level, int new_level) |
312 | 312 |
{ |
313 | 313 |
const Item ai = *_last_active[level]; |
314 | 314 |
|
315 | 315 |
copy(--_first[level+1], _last_active[level]--); |
316 | 316 |
for(int l=level+1;l<new_level;l++) |
317 | 317 |
{ |
318 | 318 |
copy(_last_active[l],_first[l]); |
319 | 319 |
copy(--_first[l+1], _last_active[l]--); |
320 | 320 |
} |
321 | 321 |
copy(ai,_first[new_level]); |
322 |
_level |
|
322 |
_level[ai] = new_level; |
|
323 | 323 |
if (new_level>_highest_active) _highest_active=new_level; |
324 | 324 |
} |
325 | 325 |
|
326 | 326 |
///Lift the active item returned by \c activeOn(level) to the top level. |
327 | 327 |
|
328 | 328 |
///Lift the active item returned by \ref activeOn() "activeOn(level)" |
329 | 329 |
///to the top level and deactivate it. |
330 | 330 |
void liftActiveToTop(int level) |
331 | 331 |
{ |
332 | 332 |
const Item ai = *_last_active[level]; |
333 | 333 |
|
334 | 334 |
copy(--_first[level+1],_last_active[level]--); |
335 | 335 |
for(int l=level+1;l<_max_level;l++) |
336 | 336 |
{ |
337 | 337 |
copy(_last_active[l],_first[l]); |
338 | 338 |
copy(--_first[l+1], _last_active[l]--); |
339 | 339 |
} |
340 | 340 |
copy(ai,_first[_max_level]); |
341 | 341 |
--_last_active[_max_level]; |
342 |
_level |
|
342 |
_level[ai] = _max_level; |
|
343 | 343 |
|
344 | 344 |
if (_highest_active==level) { |
345 | 345 |
while(_highest_active>=0 && |
346 | 346 |
_last_active[_highest_active]<_first[_highest_active]) |
347 | 347 |
_highest_active--; |
348 | 348 |
} |
349 | 349 |
} |
350 | 350 |
|
351 | 351 |
///@} |
352 | 352 |
|
353 | 353 |
///Lift an active item to a higher level. |
354 | 354 |
|
355 | 355 |
///Lift an active item to a higher level. |
356 | 356 |
///\param i The item to be lifted. It must be active. |
357 | 357 |
///\param new_level The new level of \c i. It must be strictly higher |
358 | 358 |
///than the current level. |
359 | 359 |
/// |
360 | 360 |
void lift(Item i, int new_level) |
361 | 361 |
{ |
362 | 362 |
const int lo = _level[i]; |
363 | 363 |
const Vit w = _where[i]; |
364 | 364 |
|
365 | 365 |
copy(_last_active[lo],w); |
366 | 366 |
copy(--_first[lo+1],_last_active[lo]--); |
367 | 367 |
for(int l=lo+1;l<new_level;l++) |
368 | 368 |
{ |
369 | 369 |
copy(_last_active[l],_first[l]); |
370 | 370 |
copy(--_first[l+1],_last_active[l]--); |
371 | 371 |
} |
372 | 372 |
copy(i,_first[new_level]); |
373 |
_level |
|
373 |
_level[i] = new_level; |
|
374 | 374 |
if(new_level>_highest_active) _highest_active=new_level; |
375 | 375 |
} |
376 | 376 |
|
377 | 377 |
///Move an inactive item to the top but one level (in a dirty way). |
378 | 378 |
|
379 | 379 |
///This function moves an inactive item from the top level to the top |
380 | 380 |
///but one level (in a dirty way). |
381 | 381 |
///\warning It makes the underlying data structure corrupt, so use it |
382 | 382 |
///only if you really know what it is for. |
383 | 383 |
///\pre The item is on the top level. |
384 | 384 |
void dirtyTopButOne(Item i) { |
385 |
_level |
|
385 |
_level[i] = _max_level - 1; |
|
386 | 386 |
} |
387 | 387 |
|
388 | 388 |
///Lift all items on and above the given level to the top level. |
389 | 389 |
|
390 | 390 |
///This function lifts all items on and above level \c l to the top |
391 | 391 |
///level and deactivates them. |
392 | 392 |
void liftToTop(int l) |
393 | 393 |
{ |
394 | 394 |
const Vit f=_first[l]; |
395 | 395 |
const Vit tl=_first[_max_level]; |
396 | 396 |
for(Vit i=f;i!=tl;++i) |
397 |
_level |
|
397 |
_level[*i] = _max_level; |
|
398 | 398 |
for(int i=l;i<=_max_level;i++) |
399 | 399 |
{ |
400 | 400 |
_first[i]=f; |
401 | 401 |
_last_active[i]=f-1; |
402 | 402 |
} |
403 | 403 |
for(_highest_active=l-1; |
404 | 404 |
_highest_active>=0 && |
405 | 405 |
_last_active[_highest_active]<_first[_highest_active]; |
406 | 406 |
_highest_active--) ; |
407 | 407 |
} |
408 | 408 |
|
409 | 409 |
private: |
410 | 410 |
int _init_lev; |
411 | 411 |
Vit _init_num; |
412 | 412 |
|
413 | 413 |
public: |
414 | 414 |
|
415 | 415 |
///\name Initialization |
416 | 416 |
///Using these functions you can initialize the levels of the items. |
417 | 417 |
///\n |
418 | 418 |
///The initialization must be started with calling \c initStart(). |
419 | 419 |
///Then the items should be listed level by level starting with the |
420 | 420 |
///lowest one (level 0) using \c initAddItem() and \c initNewLevel(). |
421 | 421 |
///Finally \c initFinish() must be called. |
422 | 422 |
///The items not listed are put on the highest level. |
423 | 423 |
///@{ |
424 | 424 |
|
425 | 425 |
///Start the initialization process. |
426 | 426 |
void initStart() |
427 | 427 |
{ |
428 | 428 |
_init_lev=0; |
429 | 429 |
_init_num=&_items[0]; |
430 | 430 |
_first[0]=&_items[0]; |
431 | 431 |
_last_active[0]=&_items[0]-1; |
432 | 432 |
Vit n=&_items[0]; |
433 | 433 |
for(typename ItemSetTraits<GR,Item>::ItemIt i(_g);i!=INVALID;++i) |
434 | 434 |
{ |
435 | 435 |
*n=i; |
436 |
_where.set(i,n); |
|
437 |
_level.set(i,_max_level); |
|
436 |
_where[i] = n; |
|
437 |
_level[i] = _max_level; |
|
438 | 438 |
++n; |
439 | 439 |
} |
440 | 440 |
} |
441 | 441 |
|
442 | 442 |
///Add an item to the current level. |
443 | 443 |
void initAddItem(Item i) |
444 | 444 |
{ |
445 | 445 |
swap(_where[i],_init_num); |
446 |
_level |
|
446 |
_level[i] = _init_lev; |
|
447 | 447 |
++_init_num; |
448 | 448 |
} |
449 | 449 |
|
450 | 450 |
///Start a new level. |
451 | 451 |
|
452 | 452 |
///Start a new level. |
453 | 453 |
///It shouldn't be used before the items on level 0 are listed. |
454 | 454 |
void initNewLevel() |
455 | 455 |
{ |
456 | 456 |
_init_lev++; |
457 | 457 |
_first[_init_lev]=_init_num; |
458 | 458 |
_last_active[_init_lev]=_init_num-1; |
459 | 459 |
} |
460 | 460 |
|
461 | 461 |
///Finalize the initialization process. |
462 | 462 |
void initFinish() |
463 | 463 |
{ |
464 | 464 |
for(_init_lev++;_init_lev<=_max_level;_init_lev++) |
465 | 465 |
{ |
466 | 466 |
_first[_init_lev]=_init_num; |
467 | 467 |
_last_active[_init_lev]=_init_num-1; |
468 | 468 |
} |
469 | 469 |
_first[_max_level+1]=&_items[0]+_item_num; |
470 | 470 |
_last_active[_max_level+1]=&_items[0]+_item_num-1; |
471 | 471 |
_highest_active = -1; |
472 | 472 |
} |
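A hedged sketch of the initialization protocol documented in the Initialization group above: \c initStart(), then the items listed level by level (lowest first) with \c initAddItem() and \c initNewLevel(), closed by \c initFinish(). The predicates onLevel0() and onLevel1() are hypothetical placeholders for however the caller decides the starting levels.
\code
// Hedged sketch of the init protocol; elev is an Elevator over the nodes of g.
elev.initStart();
for (ListDigraph::NodeIt v(g); v != INVALID; ++v)
  if (onLevel0(v)) elev.initAddItem(v);    // hypothetical level-0 predicate
elev.initNewLevel();
for (ListDigraph::NodeIt v(g); v != INVALID; ++v)
  if (onLevel1(v)) elev.initAddItem(v);    // hypothetical level-1 predicate
elev.initFinish();   // items never listed end up on the highest level
\endcode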
473 | 473 |
|
474 | 474 |
///@} |
475 | 475 |
|
476 | 476 |
}; |
477 | 477 |
|
478 | 478 |
///Class for handling "labels" in push-relabel type algorithms. |
479 | 479 |
|
480 | 480 |
///A class for handling "labels" in push-relabel type algorithms. |
481 | 481 |
/// |
482 | 482 |
///\ingroup auxdat |
483 | 483 |
///Using this class you can assign "labels" (nonnegative integer numbers) |
484 | 484 |
///to the edges or nodes of a graph, manipulate and query them through |
485 | 485 |
///operations typically arising in "push-relabel" type algorithms. |
486 | 486 |
/// |
487 | 487 |
///Each item is either \em active or not, and you can also choose a |
488 | 488 |
///highest level active item. |
489 | 489 |
/// |
490 | 490 |
///\sa Elevator |
491 | 491 |
/// |
492 | 492 |
///\param GR Type of the underlying graph. |
493 | 493 |
///\param Item Type of the items the data is assigned to (\c GR::Node, |
494 | 494 |
///\c GR::Arc or \c GR::Edge). |
495 | 495 |
template <class GR, class Item> |
496 | 496 |
class LinkedElevator { |
497 | 497 |
public: |
498 | 498 |
|
499 | 499 |
typedef Item Key; |
500 | 500 |
typedef int Value; |
501 | 501 |
|
502 | 502 |
private: |
503 | 503 |
|
504 | 504 |
typedef typename ItemSetTraits<GR,Item>:: |
505 | 505 |
template Map<Item>::Type ItemMap; |
506 | 506 |
typedef typename ItemSetTraits<GR,Item>:: |
507 | 507 |
template Map<int>::Type IntMap; |
508 | 508 |
typedef typename ItemSetTraits<GR,Item>:: |
509 | 509 |
template Map<bool>::Type BoolMap; |
510 | 510 |
|
511 | 511 |
const GR &_graph; |
512 | 512 |
int _max_level; |
513 | 513 |
int _item_num; |
514 | 514 |
std::vector<Item> _first, _last; |
515 | 515 |
ItemMap _prev, _next; |
516 | 516 |
int _highest_active; |
517 | 517 |
IntMap _level; |
518 | 518 |
BoolMap _active; |
519 | 519 |
|
520 | 520 |
public: |
521 | 521 |
///Constructor with given maximum level. |
522 | 522 |
|
523 | 523 |
///Constructor with given maximum level. |
524 | 524 |
/// |
525 | 525 |
///\param graph The underlying graph. |
526 | 526 |
///\param max_level The maximum allowed level. |
527 | 527 |
///Set the range of the possible labels to <tt>[0..max_level]</tt>. |
528 | 528 |
LinkedElevator(const GR& graph, int max_level) |
529 | 529 |
: _graph(graph), _max_level(max_level), _item_num(_max_level), |
530 | 530 |
_first(_max_level + 1), _last(_max_level + 1), |
531 | 531 |
_prev(graph), _next(graph), |
532 | 532 |
_highest_active(-1), _level(graph), _active(graph) {} |
533 | 533 |
|
534 | 534 |
///Constructor. |
535 | 535 |
|
536 | 536 |
///Constructor. |
537 | 537 |
/// |
538 | 538 |
///\param graph The underlying graph. |
539 | 539 |
///Set the range of the possible labels to <tt>[0..max_level]</tt>, |
540 | 540 |
///where \c max_level is equal to the number of labeled items in the graph. |
541 | 541 |
LinkedElevator(const GR& graph) |
542 | 542 |
: _graph(graph), _max_level(countItems<GR, Item>(graph)), |
543 | 543 |
_item_num(_max_level), |
544 | 544 |
_first(_max_level + 1), _last(_max_level + 1), |
545 | 545 |
_prev(graph, INVALID), _next(graph, INVALID), |
546 | 546 |
_highest_active(-1), _level(graph), _active(graph) {} |
547 | 547 |
|
548 | 548 |
|
549 | 549 |
///Activate item \c i. |
550 | 550 |
|
551 | 551 |
///Activate item \c i. |
552 | 552 |
///\pre Item \c i shouldn't be active before. |
553 | 553 |
void activate(Item i) { |
554 |
_active |
|
554 |
_active[i] = true; |
|
555 | 555 |
|
556 | 556 |
int level = _level[i]; |
557 | 557 |
if (level > _highest_active) { |
558 | 558 |
_highest_active = level; |
559 | 559 |
} |
560 | 560 |
|
561 | 561 |
if (_prev[i] == INVALID || _active[_prev[i]]) return; |
562 | 562 |
//unlace |
563 |
_next |
|
563 |
_next[_prev[i]] = _next[i]; |
|
564 | 564 |
if (_next[i] != INVALID) { |
565 |
_prev |
|
565 |
_prev[_next[i]] = _prev[i]; |
|
566 | 566 |
} else { |
567 | 567 |
_last[level] = _prev[i]; |
568 | 568 |
} |
569 | 569 |
//lace |
570 |
_next.set(i, _first[level]); |
|
571 |
_prev.set(_first[level], i); |
|
572 |
|
|
570 |
_next[i] = _first[level]; |
|
571 |
_prev[_first[level]] = i; |
|
572 |
_prev[i] = INVALID; |
|
573 | 573 |
_first[level] = i; |
574 | 574 |
|
575 | 575 |
} |
576 | 576 |
|
577 | 577 |
///Deactivate item \c i. |
578 | 578 |
|
579 | 579 |
///Deactivate item \c i. |
580 | 580 |
///\pre Item \c i must be active before. |
581 | 581 |
void deactivate(Item i) { |
582 |
_active |
|
582 |
_active[i] = false; |
|
583 | 583 |
int level = _level[i]; |
584 | 584 |
|
585 | 585 |
if (_next[i] == INVALID || !_active[_next[i]]) |
586 | 586 |
goto find_highest_level; |
587 | 587 |
|
588 | 588 |
//unlace |
589 |
_prev |
|
589 |
_prev[_next[i]] = _prev[i]; |
|
590 | 590 |
if (_prev[i] != INVALID) { |
591 |
_next |
|
591 |
_next[_prev[i]] = _next[i]; |
|
592 | 592 |
} else { |
593 | 593 |
_first[_level[i]] = _next[i]; |
594 | 594 |
} |
595 | 595 |
//lace |
596 |
_prev.set(i, _last[level]); |
|
597 |
_next.set(_last[level], i); |
|
598 |
|
|
596 |
_prev[i] = _last[level]; |
|
597 |
_next[_last[level]] = i; |
|
598 |
_next[i] = INVALID; |
|
599 | 599 |
_last[level] = i; |
600 | 600 |
|
601 | 601 |
find_highest_level: |
602 | 602 |
if (level == _highest_active) { |
603 | 603 |
while (_highest_active >= 0 && activeFree(_highest_active)) |
604 | 604 |
--_highest_active; |
605 | 605 |
} |
606 | 606 |
} |
607 | 607 |
|
608 | 608 |
///Query whether item \c i is active |
609 | 609 |
bool active(Item i) const { return _active[i]; } |
610 | 610 |
|
611 | 611 |
///Return the level of item \c i. |
612 | 612 |
int operator[](Item i) const { return _level[i]; } |
613 | 613 |
|
614 | 614 |
///Return the number of items on level \c l. |
615 | 615 |
int onLevel(int l) const { |
616 | 616 |
int num = 0; |
617 | 617 |
Item n = _first[l]; |
618 | 618 |
while (n != INVALID) { |
619 | 619 |
++num; |
620 | 620 |
n = _next[n]; |
621 | 621 |
} |
622 | 622 |
return num; |
623 | 623 |
} |
624 | 624 |
|
625 | 625 |
///Return true if the level is empty. |
626 | 626 |
bool emptyLevel(int l) const { |
627 | 627 |
return _first[l] == INVALID; |
628 | 628 |
} |
629 | 629 |
|
630 | 630 |
///Return the number of items above level \c l. |
631 | 631 |
int aboveLevel(int l) const { |
632 | 632 |
int num = 0; |
633 | 633 |
for (int level = l + 1; level < _max_level; ++level) |
634 | 634 |
num += onLevel(level); |
635 | 635 |
return num; |
636 | 636 |
} |
637 | 637 |
|
638 | 638 |
///Return the number of active items on level \c l. |
639 | 639 |
int activesOnLevel(int l) const { |
640 | 640 |
int num = 0; |
641 | 641 |
Item n = _first[l]; |
642 | 642 |
while (n != INVALID && _active[n]) { |
643 | 643 |
++num; |
644 | 644 |
n = _next[n]; |
645 | 645 |
} |
646 | 646 |
return num; |
647 | 647 |
} |
648 | 648 |
|
649 | 649 |
///Return true if there is no active item on level \c l. |
650 | 650 |
bool activeFree(int l) const { |
651 | 651 |
return _first[l] == INVALID || !_active[_first[l]]; |
652 | 652 |
} |
653 | 653 |
|
654 | 654 |
///Return the maximum allowed level. |
655 | 655 |
int maxLevel() const { |
656 | 656 |
return _max_level; |
657 | 657 |
} |
658 | 658 |
|
659 | 659 |
///\name Highest Active Item |
660 | 660 |
///Functions for working with the highest level |
661 | 661 |
///active item. |
662 | 662 |
|
663 | 663 |
///@{ |
664 | 664 |
|
665 | 665 |
///Return a highest level active item. |
666 | 666 |
|
667 | 667 |
///Return a highest level active item or INVALID if there is no active |
668 | 668 |
///item. |
669 | 669 |
Item highestActive() const { |
670 | 670 |
return _highest_active >= 0 ? _first[_highest_active] : INVALID; |
671 | 671 |
} |
672 | 672 |
|
673 | 673 |
///Return the highest active level. |
674 | 674 |
|
675 | 675 |
///Return the level of the highest active item or -1 if there is no active |
676 | 676 |
///item. |
677 | 677 |
int highestActiveLevel() const { |
678 | 678 |
return _highest_active; |
679 | 679 |
} |
680 | 680 |
|
681 | 681 |
///Lift the highest active item by one. |
682 | 682 |
|
683 | 683 |
///Lift the item returned by highestActive() by one. |
684 | 684 |
/// |
685 | 685 |
void liftHighestActive() { |
686 | 686 |
Item i = _first[_highest_active]; |
687 | 687 |
if (_next[i] != INVALID) { |
688 |
_prev |
|
688 |
_prev[_next[i]] = INVALID; |
|
689 | 689 |
_first[_highest_active] = _next[i]; |
690 | 690 |
} else { |
691 | 691 |
_first[_highest_active] = INVALID; |
692 | 692 |
_last[_highest_active] = INVALID; |
693 | 693 |
} |
694 |
_level |
|
694 |
_level[i] = ++_highest_active; |
|
695 | 695 |
if (_first[_highest_active] == INVALID) { |
696 | 696 |
_first[_highest_active] = i; |
697 | 697 |
_last[_highest_active] = i; |
698 |
_prev.set(i, INVALID); |
|
699 |
_next.set(i, INVALID); |
|
698 |
_prev[i] = INVALID; |
|
699 |
_next[i] = INVALID; |
|
700 | 700 |
} else { |
701 |
_prev.set(_first[_highest_active], i); |
|
702 |
_next.set(i, _first[_highest_active]); |
|
701 |
_prev[_first[_highest_active]] = i; |
|
702 |
_next[i] = _first[_highest_active]; |
|
703 | 703 |
_first[_highest_active] = i; |
704 | 704 |
} |
705 | 705 |
} |
706 | 706 |
|
707 | 707 |
///Lift the highest active item to the given level. |
708 | 708 |
|
709 | 709 |
///Lift the item returned by highestActive() to level \c new_level. |
710 | 710 |
/// |
711 | 711 |
///\warning \c new_level must be strictly higher |
712 | 712 |
///than the current level. |
713 | 713 |
/// |
714 | 714 |
void liftHighestActive(int new_level) { |
715 | 715 |
Item i = _first[_highest_active]; |
716 | 716 |
if (_next[i] != INVALID) { |
717 |
_prev |
|
717 |
_prev[_next[i]] = INVALID; |
|
718 | 718 |
_first[_highest_active] = _next[i]; |
719 | 719 |
} else { |
720 | 720 |
_first[_highest_active] = INVALID; |
721 | 721 |
_last[_highest_active] = INVALID; |
722 | 722 |
} |
723 |
_level |
|
723 |
_level[i] = _highest_active = new_level; |
|
724 | 724 |
if (_first[_highest_active] == INVALID) { |
725 | 725 |
_first[_highest_active] = _last[_highest_active] = i; |
726 |
_prev.set(i, INVALID); |
|
727 |
_next.set(i, INVALID); |
|
726 |
_prev[i] = INVALID; |
|
727 |
_next[i] = INVALID; |
|
728 | 728 |
} else { |
729 |
_prev.set(_first[_highest_active], i); |
|
730 |
_next.set(i, _first[_highest_active]); |
|
729 |
_prev[_first[_highest_active]] = i; |
|
730 |
_next[i] = _first[_highest_active]; |
|
731 | 731 |
_first[_highest_active] = i; |
732 | 732 |
} |
733 | 733 |
} |
734 | 734 |
|
735 | 735 |
///Lift the highest active item to the top level. |
736 | 736 |
|
737 | 737 |
///Lift the item returned by highestActive() to the top level and |
738 | 738 |
///deactivate it. |
739 | 739 |
void liftHighestActiveToTop() { |
740 | 740 |
Item i = _first[_highest_active]; |
741 |
_level |
|
741 |
_level[i] = _max_level; |
|
742 | 742 |
if (_next[i] != INVALID) { |
743 |
_prev |
|
743 |
_prev[_next[i]] = INVALID; |
|
744 | 744 |
_first[_highest_active] = _next[i]; |
745 | 745 |
} else { |
746 | 746 |
_first[_highest_active] = INVALID; |
747 | 747 |
_last[_highest_active] = INVALID; |
748 | 748 |
} |
749 | 749 |
while (_highest_active >= 0 && activeFree(_highest_active)) |
750 | 750 |
--_highest_active; |
751 | 751 |
} |
752 | 752 |
|
753 | 753 |
///@} |
754 | 754 |
|
755 | 755 |
///\name Active Item on Certain Level |
756 | 756 |
///Functions for working with the active items. |
757 | 757 |
|
758 | 758 |
///@{ |
759 | 759 |
|
760 | 760 |
///Return an active item on level \c l. |
761 | 761 |
|
762 | 762 |
///Return an active item on level \c l or \ref INVALID if there is no such |
763 | 763 |
///item. (\c l must be from the range [0...\c max_level].) |
764 | 764 |
Item activeOn(int l) const |
765 | 765 |
{ |
766 | 766 |
return _active[_first[l]] ? _first[l] : INVALID; |
767 | 767 |
} |
768 | 768 |
|
769 | 769 |
///Lift the active item returned by \c activeOn(l) by one. |
770 | 770 |
|
771 | 771 |
///Lift the active item returned by \ref activeOn() "activeOn(l)" |
772 | 772 |
///by one. |
773 | 773 |
void liftActiveOn(int l) |
774 | 774 |
{ |
775 | 775 |
Item i = _first[l]; |
776 | 776 |
if (_next[i] != INVALID) { |
777 |
_prev |
|
777 |
_prev[_next[i]] = INVALID; |
|
778 | 778 |
_first[l] = _next[i]; |
779 | 779 |
} else { |
780 | 780 |
_first[l] = INVALID; |
781 | 781 |
_last[l] = INVALID; |
782 | 782 |
} |
783 |
_level |
|
783 |
_level[i] = ++l; |
|
784 | 784 |
if (_first[l] == INVALID) { |
785 | 785 |
_first[l] = _last[l] = i; |
786 |
_prev.set(i, INVALID); |
|
787 |
_next.set(i, INVALID); |
|
786 |
_prev[i] = INVALID; |
|
787 |
_next[i] = INVALID; |
|
788 | 788 |
} else { |
789 |
_prev.set(_first[l], i); |
|
790 |
_next.set(i, _first[l]); |
|
789 |
_prev[_first[l]] = i; |
|
790 |
_next[i] = _first[l]; |
|
791 | 791 |
_first[l] = i; |
792 | 792 |
} |
793 | 793 |
if (_highest_active < l) { |
794 | 794 |
_highest_active = l; |
795 | 795 |
} |
796 | 796 |
} |
797 | 797 |
|
798 | 798 |
///Lift the active item returned by \c activeOn(l) to the given level. |
799 | 799 |
|
800 | 800 |
///Lift the active item returned by \ref activeOn() "activeOn(l)" |
801 | 801 |
///to the given level. |
802 | 802 |
void liftActiveOn(int l, int new_level) |
803 | 803 |
{ |
804 | 804 |
Item i = _first[l]; |
805 | 805 |
if (_next[i] != INVALID) { |
806 |
_prev.set(_next[i], INVALID); |
|
806 |
_prev[_next[i]] = INVALID; |
|
807 | 807 |
_first[l] = _next[i]; |
808 | 808 |
} else { |
809 | 809 |
_first[l] = INVALID; |
810 | 810 |
_last[l] = INVALID; |
811 | 811 |
} |
812 |
_level.set(i, l = new_level); |
|
812 |
_level[i] = l = new_level; |
|
813 | 813 |
if (_first[l] == INVALID) { |
814 | 814 |
_first[l] = _last[l] = i; |
815 |
_prev.set(i, INVALID); |
|
816 |
_next.set(i, INVALID); |
|
815 |
_prev[i] = INVALID; |
|
816 |
_next[i] = INVALID; |
|
817 | 817 |
} else { |
818 |
_prev.set(_first[l], i); |
|
819 |
_next.set(i, _first[l]); |
|
818 |
_prev[_first[l]] = i; |
|
819 |
_next[i] = _first[l]; |
|
820 | 820 |
_first[l] = i; |
821 | 821 |
} |
822 | 822 |
if (_highest_active < l) { |
823 | 823 |
_highest_active = l; |
824 | 824 |
} |
825 | 825 |
} |
826 | 826 |
|
827 | 827 |
///Lift the active item returned by \c activeOn(l) to the top level. |
828 | 828 |
|
829 | 829 |
///Lift the active item returned by \ref activeOn() "activeOn(l)" |
830 | 830 |
///to the top level and deactivate it. |
831 | 831 |
void liftActiveToTop(int l) |
832 | 832 |
{ |
833 | 833 |
Item i = _first[l]; |
834 | 834 |
if (_next[i] != INVALID) { |
835 |
_prev.set(_next[i], INVALID); |
|
835 |
_prev[_next[i]] = INVALID; |
|
836 | 836 |
_first[l] = _next[i]; |
837 | 837 |
} else { |
838 | 838 |
_first[l] = INVALID; |
839 | 839 |
_last[l] = INVALID; |
840 | 840 |
} |
841 |
_level.set(i, _max_level); |
|
841 |
_level[i] = _max_level; |
|
842 | 842 |
if (l == _highest_active) { |
843 | 843 |
while (_highest_active >= 0 && activeFree(_highest_active)) |
844 | 844 |
--_highest_active; |
845 | 845 |
} |
846 | 846 |
} |
847 | 847 |
|
848 | 848 |
///@} |
849 | 849 |
|
850 | 850 |
/// \brief Lift an active item to a higher level. |
851 | 851 |
/// |
852 | 852 |
/// Lift an active item to a higher level. |
853 | 853 |
/// \param i The item to be lifted. It must be active. |
854 | 854 |
/// \param new_level The new level of \c i. It must be strictly higher |
855 | 855 |
/// than the current level. |
856 | 856 |
/// |
857 | 857 |
void lift(Item i, int new_level) { |
858 | 858 |
if (_next[i] != INVALID) { |
859 |
_prev.set(_next[i], _prev[i]); |
|
859 |
_prev[_next[i]] = _prev[i]; |
|
860 | 860 |
} else { |
861 | 861 |
_last[new_level] = _prev[i]; |
862 | 862 |
} |
863 | 863 |
if (_prev[i] != INVALID) { |
864 |
_next.set(_prev[i], _next[i]); |
|
864 |
_next[_prev[i]] = _next[i]; |
|
865 | 865 |
} else { |
866 | 866 |
_first[new_level] = _next[i]; |
867 | 867 |
} |
868 |
_level.set(i, new_level); |
|
868 |
_level[i] = new_level; |
|
869 | 869 |
if (_first[new_level] == INVALID) { |
870 | 870 |
_first[new_level] = _last[new_level] = i; |
871 |
_prev.set(i, INVALID); |
|
872 |
_next.set(i, INVALID); |
|
871 |
_prev[i] = INVALID; |
|
872 |
_next[i] = INVALID; |
|
873 | 873 |
} else { |
874 |
_prev.set(_first[new_level], i); |
|
875 |
_next.set(i, _first[new_level]); |
|
874 |
_prev[_first[new_level]] = i; |
|
875 |
_next[i] = _first[new_level]; |
|
876 | 876 |
_first[new_level] = i; |
877 | 877 |
} |
878 | 878 |
if (_highest_active < new_level) { |
879 | 879 |
_highest_active = new_level; |
880 | 880 |
} |
881 | 881 |
} |
882 | 882 |
|
883 | 883 |
///Move an inactive item to the top but one level (in a dirty way). |
884 | 884 |
|
885 | 885 |
///This function moves an inactive item from the top level to the top |
886 | 886 |
///but one level (in a dirty way). |
887 | 887 |
///\warning It makes the underlying data structure corrupt, so use it |
888 | 888 |
///only if you really know what it is for. |
889 | 889 |
///\pre The item is on the top level. |
890 | 890 |
void dirtyTopButOne(Item i) { |
891 |
_level.set(i, _max_level - 1); |
|
891 |
_level[i] = _max_level - 1; |
|
892 | 892 |
} |
893 | 893 |
|
894 | 894 |
///Lift all items on and above the given level to the top level. |
895 | 895 |
|
896 | 896 |
///This function lifts all items on and above level \c l to the top |
897 | 897 |
///level and deactivates them. |
898 | 898 |
void liftToTop(int l) { |
899 | 899 |
for (int i = l + 1; _first[i] != INVALID; ++i) { |
900 | 900 |
Item n = _first[i]; |
901 | 901 |
while (n != INVALID) { |
902 |
_level.set(n, _max_level); |
|
902 |
_level[n] = _max_level; |
|
903 | 903 |
n = _next[n]; |
904 | 904 |
} |
905 | 905 |
_first[i] = INVALID; |
906 | 906 |
_last[i] = INVALID; |
907 | 907 |
} |
908 | 908 |
if (_highest_active > l - 1) { |
909 | 909 |
_highest_active = l - 1; |
910 | 910 |
while (_highest_active >= 0 && activeFree(_highest_active)) |
911 | 911 |
--_highest_active; |
912 | 912 |
} |
913 | 913 |
} |
914 | 914 |
|
915 | 915 |
private: |
916 | 916 |
|
917 | 917 |
int _init_level; |
918 | 918 |
|
919 | 919 |
public: |
920 | 920 |
|
921 | 921 |
///\name Initialization |
922 | 922 |
///Using these functions you can initialize the levels of the items. |
923 | 923 |
///\n |
924 | 924 |
///The initialization must be started by calling \c initStart(). |
925 | 925 |
///Then the items should be listed level by level starting with the |
926 | 926 |
///lowest one (level 0) using \c initAddItem() and \c initNewLevel(). |
927 | 927 |
///Finally \c initFinish() must be called. |
928 | 928 |
///The items not listed are put on the highest level. |
929 | 929 |
///@{ |
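A hedged sketch, not part of the changeset, of the call order described above; LinkedElevator and its (graph, max_level) constructor are assumed from the rest of this header, and the node names are made up.

#include <lemon/elevator.h>
#include <lemon/list_graph.h>
using namespace lemon;

int main() {
  ListDigraph g;
  ListDigraph::Node s = g.addNode(), u = g.addNode(), t = g.addNode();
  LinkedElevator<ListDigraph, ListDigraph::Node> el(g, 3);  // assumed (graph, max_level) constructor

  el.initStart();      // every item starts on the highest level
  el.initAddItem(t);   // level 0
  el.initNewLevel();
  el.initAddItem(u);   // level 1
  el.initFinish();     // s was never listed, so it stays on the highest level
  return 0;
}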
930 | 930 |
|
931 | 931 |
///Start the initialization process. |
932 | 932 |
void initStart() { |
933 | 933 |
|
934 | 934 |
for (int i = 0; i <= _max_level; ++i) { |
935 | 935 |
_first[i] = _last[i] = INVALID; |
936 | 936 |
} |
937 | 937 |
_init_level = 0; |
938 | 938 |
for(typename ItemSetTraits<GR,Item>::ItemIt i(_graph); |
939 | 939 |
i != INVALID; ++i) { |
940 |
_level.set(i, _max_level); |
|
941 |
_active.set(i, false); |
|
940 |
_level[i] = _max_level; |
|
941 |
_active[i] = false; |
|
942 | 942 |
} |
943 | 943 |
} |
944 | 944 |
|
945 | 945 |
///Add an item to the current level. |
946 | 946 |
void initAddItem(Item i) { |
947 |
_level.set(i, _init_level); |
|
947 |
_level[i] = _init_level; |
|
948 | 948 |
if (_last[_init_level] == INVALID) { |
949 | 949 |
_first[_init_level] = i; |
950 | 950 |
_last[_init_level] = i; |
951 |
_prev.set(i, INVALID); |
|
952 |
_next.set(i, INVALID); |
|
951 |
_prev[i] = INVALID; |
|
952 |
_next[i] = INVALID; |
|
953 | 953 |
} else { |
954 |
_prev.set(i, _last[_init_level]); |
|
955 |
_next.set(i, INVALID); |
|
956 |
_next.set(_last[_init_level], i); |
|
954 |
_prev[i] = _last[_init_level]; |
|
955 |
_next[i] = INVALID; |
|
956 |
_next[_last[_init_level]] = i; |
|
957 | 957 |
_last[_init_level] = i; |
958 | 958 |
} |
959 | 959 |
} |
960 | 960 |
|
961 | 961 |
///Start a new level. |
962 | 962 |
|
963 | 963 |
///Start a new level. |
964 | 964 |
///It shouldn't be used before the items on level 0 are listed. |
965 | 965 |
void initNewLevel() { |
966 | 966 |
++_init_level; |
967 | 967 |
} |
968 | 968 |
|
969 | 969 |
///Finalize the initialization process. |
970 | 970 |
void initFinish() { |
971 | 971 |
_highest_active = -1; |
972 | 972 |
} |
973 | 973 |
|
974 | 974 |
///@} |
975 | 975 |
|
976 | 976 |
}; |
977 | 977 |
|
978 | 978 |
|
979 | 979 |
} //END OF NAMESPACE LEMON |
980 | 980 |
|
981 | 981 |
#endif |
982 | 982 |
... | ... |
@@ -50,361 +50,361 @@ |
50 | 50 |
/// the \ref Preflow algorithm), therefore the algorithm has |
51 | 51 |
/// \f$O(n^3\sqrt{e})\f$ overall time complexity. It calculates a |
52 | 52 |
/// rooted Gomory-Hu tree, its structure and the weights can be obtained |
53 | 53 |
/// by \c predNode(), \c predValue() and \c rootDist(). |
54 | 54 |
/// |
55 | 55 |
/// The members \c minCutMap() and \c minCutValue() calculate |
56 | 56 |
/// the minimum cut and the minimum cut value between any two nodes |
57 | 57 |
/// in the graph. You can also list (iterate on) the nodes and the |
58 | 58 |
/// edges of the cuts using \c MinCutNodeIt and \c MinCutEdgeIt. |
59 | 59 |
/// |
60 | 60 |
/// \tparam GR The type of the undirected graph the algorithm runs on. |
61 | 61 |
/// \tparam CAP The type of the edge map describing the edge capacities. |
62 | 62 |
/// It is \ref concepts::Graph::EdgeMap "GR::EdgeMap<int>" by default. |
63 | 63 |
#ifdef DOXYGEN |
64 | 64 |
template <typename GR, |
65 | 65 |
typename CAP> |
66 | 66 |
#else |
67 | 67 |
template <typename GR, |
68 | 68 |
typename CAP = typename GR::template EdgeMap<int> > |
69 | 69 |
#endif |
70 | 70 |
class GomoryHu { |
71 | 71 |
public: |
72 | 72 |
|
73 | 73 |
/// The graph type |
74 | 74 |
typedef GR Graph; |
75 | 75 |
/// The type of the edge capacity map |
76 | 76 |
typedef CAP Capacity; |
77 | 77 |
/// The value type of capacities |
78 | 78 |
typedef typename Capacity::Value Value; |
79 | 79 |
|
80 | 80 |
private: |
81 | 81 |
|
82 | 82 |
TEMPLATE_GRAPH_TYPEDEFS(Graph); |
83 | 83 |
|
84 | 84 |
const Graph& _graph; |
85 | 85 |
const Capacity& _capacity; |
86 | 86 |
|
87 | 87 |
Node _root; |
88 | 88 |
typename Graph::template NodeMap<Node>* _pred; |
89 | 89 |
typename Graph::template NodeMap<Value>* _weight; |
90 | 90 |
typename Graph::template NodeMap<int>* _order; |
91 | 91 |
|
92 | 92 |
void createStructures() { |
93 | 93 |
if (!_pred) { |
94 | 94 |
_pred = new typename Graph::template NodeMap<Node>(_graph); |
95 | 95 |
} |
96 | 96 |
if (!_weight) { |
97 | 97 |
_weight = new typename Graph::template NodeMap<Value>(_graph); |
98 | 98 |
} |
99 | 99 |
if (!_order) { |
100 | 100 |
_order = new typename Graph::template NodeMap<int>(_graph); |
101 | 101 |
} |
102 | 102 |
} |
103 | 103 |
|
104 | 104 |
void destroyStructures() { |
105 | 105 |
if (_pred) { |
106 | 106 |
delete _pred; |
107 | 107 |
} |
108 | 108 |
if (_weight) { |
109 | 109 |
delete _weight; |
110 | 110 |
} |
111 | 111 |
if (_order) { |
112 | 112 |
delete _order; |
113 | 113 |
} |
114 | 114 |
} |
115 | 115 |
|
116 | 116 |
public: |
117 | 117 |
|
118 | 118 |
/// \brief Constructor |
119 | 119 |
/// |
120 | 120 |
/// Constructor |
121 | 121 |
/// \param graph The undirected graph the algorithm runs on. |
122 | 122 |
/// \param capacity The edge capacity map. |
123 | 123 |
GomoryHu(const Graph& graph, const Capacity& capacity) |
124 | 124 |
: _graph(graph), _capacity(capacity), |
125 | 125 |
_pred(0), _weight(0), _order(0) |
126 | 126 |
{ |
127 | 127 |
checkConcept<concepts::ReadMap<Edge, Value>, Capacity>(); |
128 | 128 |
} |
129 | 129 |
|
130 | 130 |
|
131 | 131 |
/// \brief Destructor |
132 | 132 |
/// |
133 | 133 |
/// Destructor |
134 | 134 |
~GomoryHu() { |
135 | 135 |
destroyStructures(); |
136 | 136 |
} |
137 | 137 |
|
138 | 138 |
private: |
139 | 139 |
|
140 | 140 |
// Initialize the internal data structures |
141 | 141 |
void init() { |
142 | 142 |
createStructures(); |
143 | 143 |
|
144 | 144 |
_root = NodeIt(_graph); |
145 | 145 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
146 |
_pred->set(n, _root); |
|
147 |
_order->set(n, -1); |
|
146 |
(*_pred)[n] = _root; |
|
147 |
(*_order)[n] = -1; |
|
148 | 148 |
} |
149 |
_pred->set(_root, INVALID); |
|
150 |
_weight->set(_root, std::numeric_limits<Value>::max()); |
|
149 |
(*_pred)[_root] = INVALID; |
|
150 |
(*_weight)[_root] = std::numeric_limits<Value>::max(); |
|
151 | 151 |
} |
152 | 152 |
|
153 | 153 |
|
154 | 154 |
// Start the algorithm |
155 | 155 |
void start() { |
156 | 156 |
Preflow<Graph, Capacity> fa(_graph, _capacity, _root, INVALID); |
157 | 157 |
|
158 | 158 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
159 | 159 |
if (n == _root) continue; |
160 | 160 |
|
161 | 161 |
Node pn = (*_pred)[n]; |
162 | 162 |
fa.source(n); |
163 | 163 |
fa.target(pn); |
164 | 164 |
|
165 | 165 |
fa.runMinCut(); |
166 | 166 |
|
167 |
_weight->set(n, fa.flowValue()); |
|
167 |
(*_weight)[n] = fa.flowValue(); |
|
168 | 168 |
|
169 | 169 |
for (NodeIt nn(_graph); nn != INVALID; ++nn) { |
170 | 170 |
if (nn != n && fa.minCut(nn) && (*_pred)[nn] == pn) { |
171 |
_pred->set(nn, n); |
|
171 |
(*_pred)[nn] = n; |
|
172 | 172 |
} |
173 | 173 |
} |
174 | 174 |
if ((*_pred)[pn] != INVALID && fa.minCut((*_pred)[pn])) { |
175 |
_pred->set(n, (*_pred)[pn]); |
|
176 |
_pred->set(pn, n); |
|
177 |
_weight->set(n, (*_weight)[pn]); |
|
178 |
_weight->set(pn, fa.flowValue()); |
|
175 |
(*_pred)[n] = (*_pred)[pn]; |
|
176 |
(*_pred)[pn] = n; |
|
177 |
(*_weight)[n] = (*_weight)[pn]; |
|
178 |
(*_weight)[pn] = fa.flowValue(); |
|
179 | 179 |
} |
180 | 180 |
} |
181 | 181 |
|
182 |
_order->set(_root, 0); |
|
182 |
(*_order)[_root] = 0; |
|
183 | 183 |
int index = 1; |
184 | 184 |
|
185 | 185 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
186 | 186 |
std::vector<Node> st; |
187 | 187 |
Node nn = n; |
188 | 188 |
while ((*_order)[nn] == -1) { |
189 | 189 |
st.push_back(nn); |
190 | 190 |
nn = (*_pred)[nn]; |
191 | 191 |
} |
192 | 192 |
while (!st.empty()) { |
193 |
_order->set(st.back(), index++); |
|
193 |
(*_order)[st.back()] = index++; |
|
194 | 194 |
st.pop_back(); |
195 | 195 |
} |
196 | 196 |
} |
197 | 197 |
} |
198 | 198 |
|
199 | 199 |
public: |
200 | 200 |
|
201 | 201 |
///\name Execution Control |
202 | 202 |
|
203 | 203 |
///@{ |
204 | 204 |
|
205 | 205 |
/// \brief Run the Gomory-Hu algorithm. |
206 | 206 |
/// |
207 | 207 |
/// This function runs the Gomory-Hu algorithm. |
208 | 208 |
void run() { |
209 | 209 |
init(); |
210 | 210 |
start(); |
211 | 211 |
} |
212 | 212 |
|
213 | 213 |
/// @} |
214 | 214 |
|
215 | 215 |
///\name Query Functions |
216 | 216 |
///The results of the algorithm can be obtained using these |
217 | 217 |
///functions.\n |
218 | 218 |
///\ref run() "run()" should be called before using them.\n |
219 | 219 |
///See also \ref MinCutNodeIt and \ref MinCutEdgeIt. |
220 | 220 |
|
221 | 221 |
///@{ |
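A hedged usage sketch, not part of the changeset, showing how the query functions below are typically combined after run(); the small graph, its capacities and the node names are made up.

#include <lemon/list_graph.h>
#include <lemon/gomory_hu.h>
#include <iostream>
using namespace lemon;

int main() {
  ListGraph g;
  ListGraph::Node a = g.addNode(), b = g.addNode(), c = g.addNode();
  ListGraph::EdgeMap<int> cap(g);
  cap[g.addEdge(a, b)] = 3;
  cap[g.addEdge(b, c)] = 2;

  GomoryHu<ListGraph> ght(g, cap);
  ght.run();

  // Print the rooted Gomory-Hu tree: every non-root node has a predecessor edge.
  for (ListGraph::NodeIt n(g); n != INVALID; ++n) {
    if (ght.predNode(n) == INVALID) continue;   // n is the root
    std::cout << g.id(n) << " -> " << g.id(ght.predNode(n))
              << "  weight=" << ght.predValue(n)
              << "  rootDist=" << ght.rootDist(n) << '\n';
  }
  std::cout << "minCutValue(a,c) = " << ght.minCutValue(a, c) << std::endl;
  return 0;
}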
222 | 222 |
|
223 | 223 |
/// \brief Return the predecessor node in the Gomory-Hu tree. |
224 | 224 |
/// |
225 | 225 |
/// This function returns the predecessor node in the Gomory-Hu tree. |
226 | 226 |
/// If the node is |
227 | 227 |
/// the root of the Gomory-Hu tree, then it returns \c INVALID. |
228 | 228 |
Node predNode(const Node& node) { |
229 | 229 |
return (*_pred)[node]; |
230 | 230 |
} |
231 | 231 |
|
232 | 232 |
/// \brief Return the distance from the root node in the Gomory-Hu tree. |
233 | 233 |
/// |
234 | 234 |
/// This function returns the distance of \c node from the root node |
235 | 235 |
/// in the Gomory-Hu tree. |
236 | 236 |
int rootDist(const Node& node) { |
237 | 237 |
return (*_order)[node]; |
238 | 238 |
} |
239 | 239 |
|
240 | 240 |
/// \brief Return the weight of the predecessor edge in the |
241 | 241 |
/// Gomory-Hu tree. |
242 | 242 |
/// |
243 | 243 |
/// This function returns the weight of the predecessor edge in the |
244 | 244 |
/// Gomory-Hu tree. If the node is the root, the result is undefined. |
245 | 245 |
Value predValue(const Node& node) { |
246 | 246 |
return (*_weight)[node]; |
247 | 247 |
} |
248 | 248 |
|
249 | 249 |
/// \brief Return the minimum cut value between two nodes |
250 | 250 |
/// |
251 | 251 |
/// This function returns the minimum cut value between two nodes. The |
252 | 252 |
/// algorithm finds the nearest common ancestor in the Gomory-Hu |
253 | 253 |
/// tree and calculates the minimum weight edge on the paths to |
254 | 254 |
/// the ancestor. |
255 | 255 |
Value minCutValue(const Node& s, const Node& t) const { |
256 | 256 |
Node sn = s, tn = t; |
257 | 257 |
Value value = std::numeric_limits<Value>::max(); |
258 | 258 |
|
259 | 259 |
while (sn != tn) { |
260 | 260 |
if ((*_order)[sn] < (*_order)[tn]) { |
261 | 261 |
if ((*_weight)[tn] <= value) value = (*_weight)[tn]; |
262 | 262 |
tn = (*_pred)[tn]; |
263 | 263 |
} else { |
264 | 264 |
if ((*_weight)[sn] <= value) value = (*_weight)[sn]; |
265 | 265 |
sn = (*_pred)[sn]; |
266 | 266 |
} |
267 | 267 |
} |
268 | 268 |
return value; |
269 | 269 |
} |
270 | 270 |
|
271 | 271 |
/// \brief Return the minimum cut between two nodes |
272 | 272 |
/// |
273 | 273 |
/// This function returns the minimum cut between the nodes \c s and \c t |
274 | 274 |
/// in the \c cutMap parameter by setting the nodes in the component of |
275 | 275 |
/// \c s to \c true and the other nodes to \c false. |
276 | 276 |
/// |
277 | 277 |
/// For higher level interfaces, see MinCutNodeIt and MinCutEdgeIt. |
278 | 278 |
template <typename CutMap> |
279 | 279 |
Value minCutMap(const Node& s, ///< The base node. |
280 | 280 |
const Node& t, |
281 | 281 |
///< The node you want to separate from node \c s. |
282 | 282 |
CutMap& cutMap |
283 | 283 |
///< The cut will be returned in this map. |
284 | 284 |
/// It must be a \c bool (or convertible) |
285 | 285 |
/// \ref concepts::ReadWriteMap "ReadWriteMap" |
286 | 286 |
/// on the graph nodes. |
287 | 287 |
) const { |
288 | 288 |
Node sn = s, tn = t; |
289 | 289 |
bool s_root=false; |
290 | 290 |
Node rn = INVALID; |
291 | 291 |
Value value = std::numeric_limits<Value>::max(); |
292 | 292 |
|
293 | 293 |
while (sn != tn) { |
294 | 294 |
if ((*_order)[sn] < (*_order)[tn]) { |
295 | 295 |
if ((*_weight)[tn] <= value) { |
296 | 296 |
rn = tn; |
297 | 297 |
s_root = false; |
298 | 298 |
value = (*_weight)[tn]; |
299 | 299 |
} |
300 | 300 |
tn = (*_pred)[tn]; |
301 | 301 |
} else { |
302 | 302 |
if ((*_weight)[sn] <= value) { |
303 | 303 |
rn = sn; |
304 | 304 |
s_root = true; |
305 | 305 |
value = (*_weight)[sn]; |
306 | 306 |
} |
307 | 307 |
sn = (*_pred)[sn]; |
308 | 308 |
} |
309 | 309 |
} |
310 | 310 |
|
311 | 311 |
typename Graph::template NodeMap<bool> reached(_graph, false); |
312 |
reached.set(_root, true); |
|
312 |
reached[_root] = true; |
|
313 | 313 |
cutMap.set(_root, !s_root); |
314 |
reached.set(rn, true); |
|
314 |
reached[rn] = true; |
|
315 | 315 |
cutMap.set(rn, s_root); |
316 | 316 |
|
317 | 317 |
std::vector<Node> st; |
318 | 318 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
319 | 319 |
st.clear(); |
320 | 320 |
Node nn = n; |
321 | 321 |
while (!reached[nn]) { |
322 | 322 |
st.push_back(nn); |
323 | 323 |
nn = (*_pred)[nn]; |
324 | 324 |
} |
325 | 325 |
while (!st.empty()) { |
326 | 326 |
cutMap.set(st.back(), cutMap[nn]); |
327 | 327 |
st.pop_back(); |
328 | 328 |
} |
329 | 329 |
} |
330 | 330 |
|
331 | 331 |
return value; |
332 | 332 |
} |
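Continuing the query-functions sketch above, a hedged example, not part of the changeset, of writing one side of the cut into a bool node map with minCutMap(); ght, g, a and c are the names assumed there.

ListGraph::NodeMap<bool> cut(g);
int v = ght.minCutMap(a, c, cut);   // v equals minCutValue(a, c)
for (ListGraph::NodeIt n(g); n != INVALID; ++n) {
  if (cut[n]) {
    // n lies in the component of a for this minimum cut
  }
}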
333 | 333 |
|
334 | 334 |
///@} |
335 | 335 |
|
336 | 336 |
friend class MinCutNodeIt; |
337 | 337 |
|
338 | 338 |
/// Iterate on the nodes of a minimum cut |
339 | 339 |
|
340 | 340 |
/// This iterator class lists the nodes of a minimum cut found by |
341 | 341 |
/// GomoryHu. Before using it, you must allocate a GomoryHu class, |
342 | 342 |
/// and call its \ref GomoryHu::run() "run()" method. |
343 | 343 |
/// |
344 | 344 |
/// This example counts the nodes in the minimum cut separating \c s from |
345 | 345 |
/// \c t. |
346 | 346 |
/// \code |
347 | 347 |
/// GomoryHu<Graph> gom(g, capacities); |
348 | 348 |
/// gom.run(); |
349 | 349 |
/// int cnt=0; |
350 | 350 |
/// for(GomoryHu<Graph>::MinCutNodeIt n(gom,s,t); n!=INVALID; ++n) ++cnt; |
351 | 351 |
/// \endcode |
352 | 352 |
class MinCutNodeIt |
353 | 353 |
{ |
354 | 354 |
bool _side; |
355 | 355 |
typename Graph::NodeIt _node_it; |
356 | 356 |
typename Graph::template NodeMap<bool> _cut; |
357 | 357 |
public: |
358 | 358 |
/// Constructor |
359 | 359 |
|
360 | 360 |
/// Constructor. |
361 | 361 |
/// |
362 | 362 |
MinCutNodeIt(GomoryHu const &gomory, |
363 | 363 |
///< The GomoryHu class. You must call its |
364 | 364 |
/// run() method |
365 | 365 |
/// before initializing this iterator. |
366 | 366 |
const Node& s, ///< The base node. |
367 | 367 |
const Node& t, |
368 | 368 |
///< The node you want to separate from node \c s. |
369 | 369 |
bool side=true |
370 | 370 |
///< If it is \c true (default) then the iterator lists |
371 | 371 |
/// the nodes of the component containing \c s, |
372 | 372 |
/// otherwise it lists the other component. |
373 | 373 |
/// \note As the minimum cut is not always unique, |
374 | 374 |
/// \code |
375 | 375 |
/// MinCutNodeIt(gomory, s, t, true); |
376 | 376 |
/// \endcode |
377 | 377 |
/// and |
378 | 378 |
/// \code |
379 | 379 |
/// MinCutNodeIt(gomory, t, s, false); |
380 | 380 |
/// \endcode |
381 | 381 |
/// do not necessarily give the same set of nodes. |
382 | 382 |
/// However, it is ensured that |
383 | 383 |
/// \code |
384 | 384 |
/// MinCutNodeIt(gomory, s, t, true); |
385 | 385 |
/// \endcode |
386 | 386 |
/// and |
387 | 387 |
/// \code |
388 | 388 |
/// MinCutNodeIt(gomory, s, t, false); |
389 | 389 |
/// \endcode |
390 | 390 |
/// together list each node exactly once. |
391 | 391 |
) |
392 | 392 |
: _side(side), _cut(gomory._graph) |
393 | 393 |
{ |
394 | 394 |
gomory.minCutMap(s,t,_cut); |
395 | 395 |
for(_node_it=typename Graph::NodeIt(gomory._graph); |
396 | 396 |
_node_it!=INVALID && _cut[_node_it]!=_side; |
397 | 397 |
++_node_it) {} |
398 | 398 |
} |
399 | 399 |
/// Conversion to \c Node |
400 | 400 |
|
401 | 401 |
/// Conversion to \c Node. |
402 | 402 |
/// |
403 | 403 |
operator typename Graph::Node() const |
404 | 404 |
{ |
405 | 405 |
return _node_it; |
406 | 406 |
} |
407 | 407 |
bool operator==(Invalid) { return _node_it==INVALID; } |
408 | 408 |
bool operator!=(Invalid) { return _node_it!=INVALID; } |
409 | 409 |
/// Next node |
410 | 410 |
... | ... |
@@ -68,828 +68,828 @@ |
68 | 68 |
#ifdef DOXYGEN |
69 | 69 |
template <typename GR, typename CAP, typename TOL> |
70 | 70 |
#else |
71 | 71 |
template <typename GR, |
72 | 72 |
typename CAP = typename GR::template ArcMap<int>, |
73 | 73 |
typename TOL = Tolerance<typename CAP::Value> > |
74 | 74 |
#endif |
75 | 75 |
class HaoOrlin { |
76 | 76 |
private: |
77 | 77 |
|
78 | 78 |
typedef GR Digraph; |
79 | 79 |
typedef CAP CapacityMap; |
80 | 80 |
typedef TOL Tolerance; |
81 | 81 |
|
82 | 82 |
typedef typename CapacityMap::Value Value; |
83 | 83 |
|
84 | 84 |
TEMPLATE_GRAPH_TYPEDEFS(Digraph); |
85 | 85 |
|
86 | 86 |
const Digraph& _graph; |
87 | 87 |
const CapacityMap* _capacity; |
88 | 88 |
|
89 | 89 |
typedef typename Digraph::template ArcMap<Value> FlowMap; |
90 | 90 |
FlowMap* _flow; |
91 | 91 |
|
92 | 92 |
Node _source; |
93 | 93 |
|
94 | 94 |
int _node_num; |
95 | 95 |
|
96 | 96 |
// Bucketing structure |
97 | 97 |
std::vector<Node> _first, _last; |
98 | 98 |
typename Digraph::template NodeMap<Node>* _next; |
99 | 99 |
typename Digraph::template NodeMap<Node>* _prev; |
100 | 100 |
typename Digraph::template NodeMap<bool>* _active; |
101 | 101 |
typename Digraph::template NodeMap<int>* _bucket; |
102 | 102 |
|
103 | 103 |
std::vector<bool> _dormant; |
104 | 104 |
|
105 | 105 |
std::list<std::list<int> > _sets; |
106 | 106 |
std::list<int>::iterator _highest; |
107 | 107 |
|
108 | 108 |
typedef typename Digraph::template NodeMap<Value> ExcessMap; |
109 | 109 |
ExcessMap* _excess; |
110 | 110 |
|
111 | 111 |
typedef typename Digraph::template NodeMap<bool> SourceSetMap; |
112 | 112 |
SourceSetMap* _source_set; |
113 | 113 |
|
114 | 114 |
Value _min_cut; |
115 | 115 |
|
116 | 116 |
typedef typename Digraph::template NodeMap<bool> MinCutMap; |
117 | 117 |
MinCutMap* _min_cut_map; |
118 | 118 |
|
119 | 119 |
Tolerance _tolerance; |
120 | 120 |
|
121 | 121 |
public: |
122 | 122 |
|
123 | 123 |
/// \brief Constructor |
124 | 124 |
/// |
125 | 125 |
/// Constructor of the algorithm class. |
126 | 126 |
HaoOrlin(const Digraph& graph, const CapacityMap& capacity, |
127 | 127 |
const Tolerance& tolerance = Tolerance()) : |
128 | 128 |
_graph(graph), _capacity(&capacity), _flow(0), _source(), |
129 | 129 |
_node_num(), _first(), _last(), _next(0), _prev(0), |
130 | 130 |
_active(0), _bucket(0), _dormant(), _sets(), _highest(), |
131 | 131 |
_excess(0), _source_set(0), _min_cut(), _min_cut_map(0), |
132 | 132 |
_tolerance(tolerance) {} |
133 | 133 |
|
134 | 134 |
~HaoOrlin() { |
135 | 135 |
if (_min_cut_map) { |
136 | 136 |
delete _min_cut_map; |
137 | 137 |
} |
138 | 138 |
if (_source_set) { |
139 | 139 |
delete _source_set; |
140 | 140 |
} |
141 | 141 |
if (_excess) { |
142 | 142 |
delete _excess; |
143 | 143 |
} |
144 | 144 |
if (_next) { |
145 | 145 |
delete _next; |
146 | 146 |
} |
147 | 147 |
if (_prev) { |
148 | 148 |
delete _prev; |
149 | 149 |
} |
150 | 150 |
if (_active) { |
151 | 151 |
delete _active; |
152 | 152 |
} |
153 | 153 |
if (_bucket) { |
154 | 154 |
delete _bucket; |
155 | 155 |
} |
156 | 156 |
if (_flow) { |
157 | 157 |
delete _flow; |
158 | 158 |
} |
159 | 159 |
} |
160 | 160 |
|
161 | 161 |
private: |
162 | 162 |
|
163 | 163 |
void activate(const Node& i) { |
164 |
_active->set(i, true); |
|
164 |
(*_active)[i] = true; |
|
165 | 165 |
|
166 | 166 |
int bucket = (*_bucket)[i]; |
167 | 167 |
|
168 | 168 |
if ((*_prev)[i] == INVALID || (*_active)[(*_prev)[i]]) return; |
169 | 169 |
//unlace |
170 |
_next->set((*_prev)[i], (*_next)[i]); |
|
170 |
(*_next)[(*_prev)[i]] = (*_next)[i]; |
|
171 | 171 |
if ((*_next)[i] != INVALID) { |
172 |
_prev->set((*_next)[i], (*_prev)[i]); |
|
172 |
(*_prev)[(*_next)[i]] = (*_prev)[i]; |
|
173 | 173 |
} else { |
174 | 174 |
_last[bucket] = (*_prev)[i]; |
175 | 175 |
} |
176 | 176 |
//lace |
177 |
_next->set(i, _first[bucket]); |
|
178 |
_prev->set(_first[bucket], i); |
|
179 |
_prev->set(i, INVALID); |
|
177 |
(*_next)[i] = _first[bucket]; |
|
178 |
(*_prev)[_first[bucket]] = i; |
|
179 |
(*_prev)[i] = INVALID; |
|
180 | 180 |
_first[bucket] = i; |
181 | 181 |
} |
182 | 182 |
|
183 | 183 |
void deactivate(const Node& i) { |
184 |
_active->set(i, false); |
|
184 |
(*_active)[i] = false; |
|
185 | 185 |
int bucket = (*_bucket)[i]; |
186 | 186 |
|
187 | 187 |
if ((*_next)[i] == INVALID || !(*_active)[(*_next)[i]]) return; |
188 | 188 |
|
189 | 189 |
//unlace |
190 |
_prev->set((*_next)[i], (*_prev)[i]); |
|
190 |
(*_prev)[(*_next)[i]] = (*_prev)[i]; |
|
191 | 191 |
if ((*_prev)[i] != INVALID) { |
192 |
_next->set((*_prev)[i], (*_next)[i]); |
|
192 |
(*_next)[(*_prev)[i]] = (*_next)[i]; |
|
193 | 193 |
} else { |
194 | 194 |
_first[bucket] = (*_next)[i]; |
195 | 195 |
} |
196 | 196 |
//lace |
197 |
_prev->set(i, _last[bucket]); |
|
198 |
_next->set(_last[bucket], i); |
|
199 |
_next->set(i, INVALID); |
|
197 |
(*_prev)[i] = _last[bucket]; |
|
198 |
(*_next)[_last[bucket]] = i; |
|
199 |
(*_next)[i] = INVALID; |
|
200 | 200 |
_last[bucket] = i; |
201 | 201 |
} |
202 | 202 |
|
203 | 203 |
void addItem(const Node& i, int bucket) { |
204 | 204 |
(*_bucket)[i] = bucket; |
205 | 205 |
if (_last[bucket] != INVALID) { |
206 |
_prev->set(i, _last[bucket]); |
|
207 |
_next->set(_last[bucket], i); |
|
208 |
_next->set(i, INVALID); |
|
206 |
(*_prev)[i] = _last[bucket]; |
|
207 |
(*_next)[_last[bucket]] = i; |
|
208 |
(*_next)[i] = INVALID; |
|
209 | 209 |
_last[bucket] = i; |
210 | 210 |
} else { |
211 |
_prev->set(i, INVALID); |
|
211 |
(*_prev)[i] = INVALID; |
|
212 | 212 |
_first[bucket] = i; |
213 |
_next->set(i, INVALID); |
|
213 |
(*_next)[i] = INVALID; |
|
214 | 214 |
_last[bucket] = i; |
215 | 215 |
} |
216 | 216 |
} |
217 | 217 |
|
218 | 218 |
void findMinCutOut() { |
219 | 219 |
|
220 | 220 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
221 |
_excess->set(n, 0); |
|
221 |
(*_excess)[n] = 0; |
|
222 | 222 |
} |
223 | 223 |
|
224 | 224 |
for (ArcIt a(_graph); a != INVALID; ++a) { |
225 |
_flow->set(a, 0); |
|
225 |
(*_flow)[a] = 0; |
|
226 | 226 |
} |
227 | 227 |
|
228 | 228 |
int bucket_num = 0; |
229 | 229 |
std::vector<Node> queue(_node_num); |
230 | 230 |
int qfirst = 0, qlast = 0, qsep = 0; |
231 | 231 |
|
232 | 232 |
{ |
233 | 233 |
typename Digraph::template NodeMap<bool> reached(_graph, false); |
234 | 234 |
|
235 |
reached.set(_source, true); |
|
235 |
reached[_source] = true; |
|
236 | 236 |
bool first_set = true; |
237 | 237 |
|
238 | 238 |
for (NodeIt t(_graph); t != INVALID; ++t) { |
239 | 239 |
if (reached[t]) continue; |
240 | 240 |
_sets.push_front(std::list<int>()); |
241 | 241 |
|
242 | 242 |
queue[qlast++] = t; |
243 |
reached.set(t, true); |
|
243 |
reached[t] = true; |
|
244 | 244 |
|
245 | 245 |
while (qfirst != qlast) { |
246 | 246 |
if (qsep == qfirst) { |
247 | 247 |
++bucket_num; |
248 | 248 |
_sets.front().push_front(bucket_num); |
249 | 249 |
_dormant[bucket_num] = !first_set; |
250 | 250 |
_first[bucket_num] = _last[bucket_num] = INVALID; |
251 | 251 |
qsep = qlast; |
252 | 252 |
} |
253 | 253 |
|
254 | 254 |
Node n = queue[qfirst++]; |
255 | 255 |
addItem(n, bucket_num); |
256 | 256 |
|
257 | 257 |
for (InArcIt a(_graph, n); a != INVALID; ++a) { |
258 | 258 |
Node u = _graph.source(a); |
259 | 259 |
if (!reached[u] && _tolerance.positive((*_capacity)[a])) { |
260 |
reached.set(u, true); |
|
260 |
reached[u] = true; |
|
261 | 261 |
queue[qlast++] = u; |
262 | 262 |
} |
263 | 263 |
} |
264 | 264 |
} |
265 | 265 |
first_set = false; |
266 | 266 |
} |
267 | 267 |
|
268 | 268 |
++bucket_num; |
269 |
_bucket->set(_source, 0); |
|
269 |
(*_bucket)[_source] = 0; |
|
270 | 270 |
_dormant[0] = true; |
271 | 271 |
} |
272 |
_source_set->set(_source, true); |
|
272 |
(*_source_set)[_source] = true; |
|
273 | 273 |
|
274 | 274 |
Node target = _last[_sets.back().back()]; |
275 | 275 |
{ |
276 | 276 |
for (OutArcIt a(_graph, _source); a != INVALID; ++a) { |
277 | 277 |
if (_tolerance.positive((*_capacity)[a])) { |
278 | 278 |
Node u = _graph.target(a); |
279 |
_flow->set(a, (*_capacity)[a]); |
|
280 |
_excess->set(u, (*_excess)[u] + (*_capacity)[a]); |
|
279 |
(*_flow)[a] = (*_capacity)[a]; |
|
280 |
(*_excess)[u] += (*_capacity)[a]; |
|
281 | 281 |
if (!(*_active)[u] && u != _source) { |
282 | 282 |
activate(u); |
283 | 283 |
} |
284 | 284 |
} |
285 | 285 |
} |
286 | 286 |
|
287 | 287 |
if ((*_active)[target]) { |
288 | 288 |
deactivate(target); |
289 | 289 |
} |
290 | 290 |
|
291 | 291 |
_highest = _sets.back().begin(); |
292 | 292 |
while (_highest != _sets.back().end() && |
293 | 293 |
!(*_active)[_first[*_highest]]) { |
294 | 294 |
++_highest; |
295 | 295 |
} |
296 | 296 |
} |
297 | 297 |
|
298 | 298 |
while (true) { |
299 | 299 |
while (_highest != _sets.back().end()) { |
300 | 300 |
Node n = _first[*_highest]; |
301 | 301 |
Value excess = (*_excess)[n]; |
302 | 302 |
int next_bucket = _node_num; |
303 | 303 |
|
304 | 304 |
int under_bucket; |
305 | 305 |
if (++std::list<int>::iterator(_highest) == _sets.back().end()) { |
306 | 306 |
under_bucket = -1; |
307 | 307 |
} else { |
308 | 308 |
under_bucket = *(++std::list<int>::iterator(_highest)); |
309 | 309 |
} |
310 | 310 |
|
311 | 311 |
for (OutArcIt a(_graph, n); a != INVALID; ++a) { |
312 | 312 |
Node v = _graph.target(a); |
313 | 313 |
if (_dormant[(*_bucket)[v]]) continue; |
314 | 314 |
Value rem = (*_capacity)[a] - (*_flow)[a]; |
315 | 315 |
if (!_tolerance.positive(rem)) continue; |
316 | 316 |
if ((*_bucket)[v] == under_bucket) { |
317 | 317 |
if (!(*_active)[v] && v != target) { |
318 | 318 |
activate(v); |
319 | 319 |
} |
320 | 320 |
if (!_tolerance.less(rem, excess)) { |
321 |
_flow->set(a, (*_flow)[a] + excess); |
|
322 |
_excess->set(v, (*_excess)[v] + excess); |
|
321 |
(*_flow)[a] += excess; |
|
322 |
(*_excess)[v] += excess; |
|
323 | 323 |
excess = 0; |
324 | 324 |
goto no_more_push; |
325 | 325 |
} else { |
326 | 326 |
excess -= rem; |
327 |
_excess->set(v, (*_excess)[v] + rem); |
|
328 |
_flow->set(a, (*_capacity)[a]); |
|
327 |
(*_excess)[v] += rem; |
|
328 |
(*_flow)[a] = (*_capacity)[a]; |
|
329 | 329 |
} |
330 | 330 |
} else if (next_bucket > (*_bucket)[v]) { |
331 | 331 |
next_bucket = (*_bucket)[v]; |
332 | 332 |
} |
333 | 333 |
} |
334 | 334 |
|
335 | 335 |
for (InArcIt a(_graph, n); a != INVALID; ++a) { |
336 | 336 |
Node v = _graph.source(a); |
337 | 337 |
if (_dormant[(*_bucket)[v]]) continue; |
338 | 338 |
Value rem = (*_flow)[a]; |
339 | 339 |
if (!_tolerance.positive(rem)) continue; |
340 | 340 |
if ((*_bucket)[v] == under_bucket) { |
341 | 341 |
if (!(*_active)[v] && v != target) { |
342 | 342 |
activate(v); |
343 | 343 |
} |
344 | 344 |
if (!_tolerance.less(rem, excess)) { |
345 |
_flow->set(a, (*_flow)[a] - excess); |
|
346 |
_excess->set(v, (*_excess)[v] + excess); |
|
345 |
(*_flow)[a] -= excess; |
|
346 |
(*_excess)[v] += excess; |
|
347 | 347 |
excess = 0; |
348 | 348 |
goto no_more_push; |
349 | 349 |
} else { |
350 | 350 |
excess -= rem; |
351 |
_excess->set(v, (*_excess)[v] + rem); |
|
352 |
_flow->set(a, 0); |
|
351 |
(*_excess)[v] += rem; |
|
352 |
(*_flow)[a] = 0; |
|
353 | 353 |
} |
354 | 354 |
} else if (next_bucket > (*_bucket)[v]) { |
355 | 355 |
next_bucket = (*_bucket)[v]; |
356 | 356 |
} |
357 | 357 |
} |
358 | 358 |
|
359 | 359 |
no_more_push: |
360 | 360 |
|
361 |
_excess->set(n, excess); |
|
361 |
(*_excess)[n] = excess; |
|
362 | 362 |
|
363 | 363 |
if (excess != 0) { |
364 | 364 |
if ((*_next)[n] == INVALID) { |
365 | 365 |
typename std::list<std::list<int> >::iterator new_set = |
366 | 366 |
_sets.insert(--_sets.end(), std::list<int>()); |
367 | 367 |
new_set->splice(new_set->end(), _sets.back(), |
368 | 368 |
_sets.back().begin(), ++_highest); |
369 | 369 |
for (std::list<int>::iterator it = new_set->begin(); |
370 | 370 |
it != new_set->end(); ++it) { |
371 | 371 |
_dormant[*it] = true; |
372 | 372 |
} |
373 | 373 |
while (_highest != _sets.back().end() && |
374 | 374 |
!(*_active)[_first[*_highest]]) { |
375 | 375 |
++_highest; |
376 | 376 |
} |
377 | 377 |
} else if (next_bucket == _node_num) { |
378 | 378 |
_first[(*_bucket)[n]] = (*_next)[n]; |
379 |
_prev->set((*_next)[n], INVALID); |
|
379 |
(*_prev)[(*_next)[n]] = INVALID; |
|
380 | 380 |
|
381 | 381 |
std::list<std::list<int> >::iterator new_set = |
382 | 382 |
_sets.insert(--_sets.end(), std::list<int>()); |
383 | 383 |
|
384 | 384 |
new_set->push_front(bucket_num); |
385 |
_bucket->set(n, bucket_num); |
|
385 |
(*_bucket)[n] = bucket_num; |
|
386 | 386 |
_first[bucket_num] = _last[bucket_num] = n; |
387 |
_next->set(n, INVALID); |
|
388 |
_prev->set(n, INVALID); |
|
387 |
(*_next)[n] = INVALID; |
|
388 |
(*_prev)[n] = INVALID; |
|
389 | 389 |
_dormant[bucket_num] = true; |
390 | 390 |
++bucket_num; |
391 | 391 |
|
392 | 392 |
while (_highest != _sets.back().end() && |
393 | 393 |
!(*_active)[_first[*_highest]]) { |
394 | 394 |
++_highest; |
395 | 395 |
} |
396 | 396 |
} else { |
397 | 397 |
_first[*_highest] = (*_next)[n]; |
398 |
_prev->set((*_next)[n], INVALID); |
|
398 |
(*_prev)[(*_next)[n]] = INVALID; |
|
399 | 399 |
|
400 | 400 |
while (next_bucket != *_highest) { |
401 | 401 |
--_highest; |
402 | 402 |
} |
403 | 403 |
|
404 | 404 |
if (_highest == _sets.back().begin()) { |
405 | 405 |
_sets.back().push_front(bucket_num); |
406 | 406 |
_dormant[bucket_num] = false; |
407 | 407 |
_first[bucket_num] = _last[bucket_num] = INVALID; |
408 | 408 |
++bucket_num; |
409 | 409 |
} |
410 | 410 |
--_highest; |
411 | 411 |
|
412 |
_bucket->set(n, *_highest); |
|
413 |
_next->set(n, _first[*_highest]); |
|
412 |
(*_bucket)[n] = *_highest; |
|
413 |
(*_next)[n] = _first[*_highest]; |
|
414 | 414 |
if (_first[*_highest] != INVALID) { |
415 |
_prev->set(_first[*_highest], n); |
|
415 |
(*_prev)[_first[*_highest]] = n; |
|
416 | 416 |
} else { |
417 | 417 |
_last[*_highest] = n; |
418 | 418 |
} |
419 | 419 |
_first[*_highest] = n; |
420 | 420 |
} |
421 | 421 |
} else { |
422 | 422 |
|
423 | 423 |
deactivate(n); |
424 | 424 |
if (!(*_active)[_first[*_highest]]) { |
425 | 425 |
++_highest; |
426 | 426 |
if (_highest != _sets.back().end() && |
427 | 427 |
!(*_active)[_first[*_highest]]) { |
428 | 428 |
_highest = _sets.back().end(); |
429 | 429 |
} |
430 | 430 |
} |
431 | 431 |
} |
432 | 432 |
} |
433 | 433 |
|
434 | 434 |
if ((*_excess)[target] < _min_cut) { |
435 | 435 |
_min_cut = (*_excess)[target]; |
436 | 436 |
for (NodeIt i(_graph); i != INVALID; ++i) { |
437 |
_min_cut_map->set(i, true); |
|
437 |
(*_min_cut_map)[i] = true; |
|
438 | 438 |
} |
439 | 439 |
for (std::list<int>::iterator it = _sets.back().begin(); |
440 | 440 |
it != _sets.back().end(); ++it) { |
441 | 441 |
Node n = _first[*it]; |
442 | 442 |
while (n != INVALID) { |
443 |
_min_cut_map->set(n, false); |
|
443 |
(*_min_cut_map)[n] = false; |
|
444 | 444 |
n = (*_next)[n]; |
445 | 445 |
} |
446 | 446 |
} |
447 | 447 |
} |
448 | 448 |
|
449 | 449 |
{ |
450 | 450 |
Node new_target; |
451 | 451 |
if ((*_prev)[target] != INVALID || (*_next)[target] != INVALID) { |
452 | 452 |
if ((*_next)[target] == INVALID) { |
453 | 453 |
_last[(*_bucket)[target]] = (*_prev)[target]; |
454 | 454 |
new_target = (*_prev)[target]; |
455 | 455 |
} else { |
456 |
_prev->set((*_next)[target], (*_prev)[target]); |
|
456 |
(*_prev)[(*_next)[target]] = (*_prev)[target]; |
|
457 | 457 |
new_target = (*_next)[target]; |
458 | 458 |
} |
459 | 459 |
if ((*_prev)[target] == INVALID) { |
460 | 460 |
_first[(*_bucket)[target]] = (*_next)[target]; |
461 | 461 |
} else { |
462 |
_next->set((*_prev)[target], (*_next)[target]); |
|
462 |
(*_next)[(*_prev)[target]] = (*_next)[target]; |
|
463 | 463 |
} |
464 | 464 |
} else { |
465 | 465 |
_sets.back().pop_back(); |
466 | 466 |
if (_sets.back().empty()) { |
467 | 467 |
_sets.pop_back(); |
468 | 468 |
if (_sets.empty()) |
469 | 469 |
break; |
470 | 470 |
for (std::list<int>::iterator it = _sets.back().begin(); |
471 | 471 |
it != _sets.back().end(); ++it) { |
472 | 472 |
_dormant[*it] = false; |
473 | 473 |
} |
474 | 474 |
} |
475 | 475 |
new_target = _last[_sets.back().back()]; |
476 | 476 |
} |
477 | 477 |
|
478 |
_bucket->set(target, 0); |
|
478 |
(*_bucket)[target] = 0; |
|
479 | 479 |
|
480 |
_source_set->set(target, true); |
|
480 |
(*_source_set)[target] = true; |
|
481 | 481 |
for (OutArcIt a(_graph, target); a != INVALID; ++a) { |
482 | 482 |
Value rem = (*_capacity)[a] - (*_flow)[a]; |
483 | 483 |
if (!_tolerance.positive(rem)) continue; |
484 | 484 |
Node v = _graph.target(a); |
485 | 485 |
if (!(*_active)[v] && !(*_source_set)[v]) { |
486 | 486 |
activate(v); |
487 | 487 |
} |
488 |
_excess->set(v, (*_excess)[v] + rem); |
|
489 |
_flow->set(a, (*_capacity)[a]); |
|
488 |
(*_excess)[v] += rem; |
|
489 |
(*_flow)[a] = (*_capacity)[a]; |
|
490 | 490 |
} |
491 | 491 |
|
492 | 492 |
for (InArcIt a(_graph, target); a != INVALID; ++a) { |
493 | 493 |
Value rem = (*_flow)[a]; |
494 | 494 |
if (!_tolerance.positive(rem)) continue; |
495 | 495 |
Node v = _graph.source(a); |
496 | 496 |
if (!(*_active)[v] && !(*_source_set)[v]) { |
497 | 497 |
activate(v); |
498 | 498 |
} |
499 |
_excess->set(v, (*_excess)[v] + rem); |
|
500 |
_flow->set(a, 0); |
|
499 |
(*_excess)[v] += rem; |
|
500 |
(*_flow)[a] = 0; |
|
501 | 501 |
} |
502 | 502 |
|
503 | 503 |
target = new_target; |
504 | 504 |
if ((*_active)[target]) { |
505 | 505 |
deactivate(target); |
506 | 506 |
} |
507 | 507 |
|
508 | 508 |
_highest = _sets.back().begin(); |
509 | 509 |
while (_highest != _sets.back().end() && |
510 | 510 |
!(*_active)[_first[*_highest]]) { |
511 | 511 |
++_highest; |
512 | 512 |
} |
513 | 513 |
} |
514 | 514 |
} |
515 | 515 |
} |
516 | 516 |
|
517 | 517 |
void findMinCutIn() { |
518 | 518 |
|
519 | 519 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
520 |
_excess->set(n, 0); |
|
520 |
(*_excess)[n] = 0; |
|
521 | 521 |
} |
522 | 522 |
|
523 | 523 |
for (ArcIt a(_graph); a != INVALID; ++a) { |
524 |
_flow->set(a, 0); |
|
524 |
(*_flow)[a] = 0; |
|
525 | 525 |
} |
526 | 526 |
|
527 | 527 |
int bucket_num = 0; |
528 | 528 |
std::vector<Node> queue(_node_num); |
529 | 529 |
int qfirst = 0, qlast = 0, qsep = 0; |
530 | 530 |
|
531 | 531 |
{ |
532 | 532 |
typename Digraph::template NodeMap<bool> reached(_graph, false); |
533 | 533 |
|
534 |
reached.set(_source, true); |
|
534 |
reached[_source] = true; |
|
535 | 535 |
|
536 | 536 |
bool first_set = true; |
537 | 537 |
|
538 | 538 |
for (NodeIt t(_graph); t != INVALID; ++t) { |
539 | 539 |
if (reached[t]) continue; |
540 | 540 |
_sets.push_front(std::list<int>()); |
541 | 541 |
|
542 | 542 |
queue[qlast++] = t; |
543 |
reached.set(t, true); |
|
543 |
reached[t] = true; |
|
544 | 544 |
|
545 | 545 |
while (qfirst != qlast) { |
546 | 546 |
if (qsep == qfirst) { |
547 | 547 |
++bucket_num; |
548 | 548 |
_sets.front().push_front(bucket_num); |
549 | 549 |
_dormant[bucket_num] = !first_set; |
550 | 550 |
_first[bucket_num] = _last[bucket_num] = INVALID; |
551 | 551 |
qsep = qlast; |
552 | 552 |
} |
553 | 553 |
|
554 | 554 |
Node n = queue[qfirst++]; |
555 | 555 |
addItem(n, bucket_num); |
556 | 556 |
|
557 | 557 |
for (OutArcIt a(_graph, n); a != INVALID; ++a) { |
558 | 558 |
Node u = _graph.target(a); |
559 | 559 |
if (!reached[u] && _tolerance.positive((*_capacity)[a])) { |
560 |
reached.set(u, true); |
|
560 |
reached[u] = true; |
|
561 | 561 |
queue[qlast++] = u; |
562 | 562 |
} |
563 | 563 |
} |
564 | 564 |
} |
565 | 565 |
first_set = false; |
566 | 566 |
} |
567 | 567 |
|
568 | 568 |
++bucket_num; |
569 |
_bucket->set(_source, 0); |
|
569 |
(*_bucket)[_source] = 0; |
|
570 | 570 |
_dormant[0] = true; |
571 | 571 |
} |
572 |
_source_set->set(_source, true); |
|
572 |
(*_source_set)[_source] = true; |
|
573 | 573 |
|
574 | 574 |
Node target = _last[_sets.back().back()]; |
575 | 575 |
{ |
576 | 576 |
for (InArcIt a(_graph, _source); a != INVALID; ++a) { |
577 | 577 |
if (_tolerance.positive((*_capacity)[a])) { |
578 | 578 |
Node u = _graph.source(a); |
579 |
_flow->set(a, (*_capacity)[a]); |
|
580 |
_excess->set(u, (*_excess)[u] + (*_capacity)[a]); |
|
579 |
(*_flow)[a] = (*_capacity)[a]; |
|
580 |
(*_excess)[u] += (*_capacity)[a]; |
|
581 | 581 |
if (!(*_active)[u] && u != _source) { |
582 | 582 |
activate(u); |
583 | 583 |
} |
584 | 584 |
} |
585 | 585 |
} |
586 | 586 |
if ((*_active)[target]) { |
587 | 587 |
deactivate(target); |
588 | 588 |
} |
589 | 589 |
|
590 | 590 |
_highest = _sets.back().begin(); |
591 | 591 |
while (_highest != _sets.back().end() && |
592 | 592 |
!(*_active)[_first[*_highest]]) { |
593 | 593 |
++_highest; |
594 | 594 |
} |
595 | 595 |
} |
596 | 596 |
|
597 | 597 |
|
598 | 598 |
while (true) { |
599 | 599 |
while (_highest != _sets.back().end()) { |
600 | 600 |
Node n = _first[*_highest]; |
601 | 601 |
Value excess = (*_excess)[n]; |
602 | 602 |
int next_bucket = _node_num; |
603 | 603 |
|
604 | 604 |
int under_bucket; |
605 | 605 |
if (++std::list<int>::iterator(_highest) == _sets.back().end()) { |
606 | 606 |
under_bucket = -1; |
607 | 607 |
} else { |
608 | 608 |
under_bucket = *(++std::list<int>::iterator(_highest)); |
609 | 609 |
} |
610 | 610 |
|
611 | 611 |
for (InArcIt a(_graph, n); a != INVALID; ++a) { |
612 | 612 |
Node v = _graph.source(a); |
613 | 613 |
if (_dormant[(*_bucket)[v]]) continue; |
614 | 614 |
Value rem = (*_capacity)[a] - (*_flow)[a]; |
615 | 615 |
if (!_tolerance.positive(rem)) continue; |
616 | 616 |
if ((*_bucket)[v] == under_bucket) { |
617 | 617 |
if (!(*_active)[v] && v != target) { |
618 | 618 |
activate(v); |
619 | 619 |
} |
620 | 620 |
if (!_tolerance.less(rem, excess)) { |
621 |
_flow->set(a, (*_flow)[a] + excess); |
|
622 |
_excess->set(v, (*_excess)[v] + excess); |
|
621 |
(*_flow)[a] += excess; |
|
622 |
(*_excess)[v] += excess; |
|
623 | 623 |
excess = 0; |
624 | 624 |
goto no_more_push; |
625 | 625 |
} else { |
626 | 626 |
excess -= rem; |
627 |
_excess->set(v, (*_excess)[v] + rem); |
|
628 |
_flow->set(a, (*_capacity)[a]); |
|
627 |
(*_excess)[v] += rem; |
|
628 |
(*_flow)[a] = (*_capacity)[a]; |
|
629 | 629 |
} |
630 | 630 |
} else if (next_bucket > (*_bucket)[v]) { |
631 | 631 |
next_bucket = (*_bucket)[v]; |
632 | 632 |
} |
633 | 633 |
} |
634 | 634 |
|
635 | 635 |
for (OutArcIt a(_graph, n); a != INVALID; ++a) { |
636 | 636 |
Node v = _graph.target(a); |
637 | 637 |
if (_dormant[(*_bucket)[v]]) continue; |
638 | 638 |
Value rem = (*_flow)[a]; |
639 | 639 |
if (!_tolerance.positive(rem)) continue; |
640 | 640 |
if ((*_bucket)[v] == under_bucket) { |
641 | 641 |
if (!(*_active)[v] && v != target) { |
642 | 642 |
activate(v); |
643 | 643 |
} |
644 | 644 |
if (!_tolerance.less(rem, excess)) { |
645 |
_flow->set(a, (*_flow)[a] - excess); |
|
646 |
_excess->set(v, (*_excess)[v] + excess); |
|
645 |
(*_flow)[a] -= excess; |
|
646 |
(*_excess)[v] += excess; |
|
647 | 647 |
excess = 0; |
648 | 648 |
goto no_more_push; |
649 | 649 |
} else { |
650 | 650 |
excess -= rem; |
651 |
_excess->set(v, (*_excess)[v] + rem); |
|
652 |
_flow->set(a, 0); |
|
651 |
(*_excess)[v] += rem; |
|
652 |
(*_flow)[a] = 0; |
|
653 | 653 |
} |
654 | 654 |
} else if (next_bucket > (*_bucket)[v]) { |
655 | 655 |
next_bucket = (*_bucket)[v]; |
656 | 656 |
} |
657 | 657 |
} |
658 | 658 |
|
659 | 659 |
no_more_push: |
660 | 660 |
|
661 |
_excess->set(n, excess); |
|
661 |
(*_excess)[n] = excess; |
|
662 | 662 |
|
663 | 663 |
if (excess != 0) { |
664 | 664 |
if ((*_next)[n] == INVALID) { |
665 | 665 |
typename std::list<std::list<int> >::iterator new_set = |
666 | 666 |
_sets.insert(--_sets.end(), std::list<int>()); |
667 | 667 |
new_set->splice(new_set->end(), _sets.back(), |
668 | 668 |
_sets.back().begin(), ++_highest); |
669 | 669 |
for (std::list<int>::iterator it = new_set->begin(); |
670 | 670 |
it != new_set->end(); ++it) { |
671 | 671 |
_dormant[*it] = true; |
672 | 672 |
} |
673 | 673 |
while (_highest != _sets.back().end() && |
674 | 674 |
!(*_active)[_first[*_highest]]) { |
675 | 675 |
++_highest; |
676 | 676 |
} |
677 | 677 |
} else if (next_bucket == _node_num) { |
678 | 678 |
_first[(*_bucket)[n]] = (*_next)[n]; |
679 |
_prev->set((*_next)[n], INVALID); |
|
679 |
(*_prev)[(*_next)[n]] = INVALID; |
|
680 | 680 |
|
681 | 681 |
std::list<std::list<int> >::iterator new_set = |
682 | 682 |
_sets.insert(--_sets.end(), std::list<int>()); |
683 | 683 |
|
684 | 684 |
new_set->push_front(bucket_num); |
685 |
_bucket->set(n, bucket_num); |
|
685 |
(*_bucket)[n] = bucket_num; |
|
686 | 686 |
_first[bucket_num] = _last[bucket_num] = n; |
687 |
_next->set(n, INVALID); |
|
688 |
_prev->set(n, INVALID); |
|
687 |
(*_next)[n] = INVALID; |
|
688 |
(*_prev)[n] = INVALID; |
|
689 | 689 |
_dormant[bucket_num] = true; |
690 | 690 |
++bucket_num; |
691 | 691 |
|
692 | 692 |
while (_highest != _sets.back().end() && |
693 | 693 |
!(*_active)[_first[*_highest]]) { |
694 | 694 |
++_highest; |
695 | 695 |
} |
696 | 696 |
} else { |
697 | 697 |
_first[*_highest] = (*_next)[n]; |
698 |
_prev->set((*_next)[n], INVALID); |
|
698 |
(*_prev)[(*_next)[n]] = INVALID; |
|
699 | 699 |
|
700 | 700 |
while (next_bucket != *_highest) { |
701 | 701 |
--_highest; |
702 | 702 |
} |
703 | 703 |
if (_highest == _sets.back().begin()) { |
704 | 704 |
_sets.back().push_front(bucket_num); |
705 | 705 |
_dormant[bucket_num] = false; |
706 | 706 |
_first[bucket_num] = _last[bucket_num] = INVALID; |
707 | 707 |
++bucket_num; |
708 | 708 |
} |
709 | 709 |
--_highest; |
710 | 710 |
|
711 |
_bucket->set(n, *_highest); |
|
712 |
_next->set(n, _first[*_highest]); |
|
711 |
(*_bucket)[n] = *_highest; |
|
712 |
(*_next)[n] = _first[*_highest]; |
|
713 | 713 |
if (_first[*_highest] != INVALID) { |
714 |
_prev->set(_first[*_highest], n); |
|
714 |
(*_prev)[_first[*_highest]] = n; |
|
715 | 715 |
} else { |
716 | 716 |
_last[*_highest] = n; |
717 | 717 |
} |
718 | 718 |
_first[*_highest] = n; |
719 | 719 |
} |
720 | 720 |
} else { |
721 | 721 |
|
722 | 722 |
deactivate(n); |
723 | 723 |
if (!(*_active)[_first[*_highest]]) { |
724 | 724 |
++_highest; |
725 | 725 |
if (_highest != _sets.back().end() && |
726 | 726 |
!(*_active)[_first[*_highest]]) { |
727 | 727 |
_highest = _sets.back().end(); |
728 | 728 |
} |
729 | 729 |
} |
730 | 730 |
} |
731 | 731 |
} |
732 | 732 |
|
733 | 733 |
if ((*_excess)[target] < _min_cut) { |
734 | 734 |
_min_cut = (*_excess)[target]; |
735 | 735 |
for (NodeIt i(_graph); i != INVALID; ++i) { |
736 |
_min_cut_map->set(i, false); |
|
736 |
(*_min_cut_map)[i] = false; |
|
737 | 737 |
} |
738 | 738 |
for (std::list<int>::iterator it = _sets.back().begin(); |
739 | 739 |
it != _sets.back().end(); ++it) { |
740 | 740 |
Node n = _first[*it]; |
741 | 741 |
while (n != INVALID) { |
742 |
_min_cut_map->set(n, true); |
|
742 |
(*_min_cut_map)[n] = true; |
|
743 | 743 |
n = (*_next)[n]; |
744 | 744 |
} |
745 | 745 |
} |
746 | 746 |
} |
747 | 747 |
|
748 | 748 |
{ |
749 | 749 |
Node new_target; |
750 | 750 |
if ((*_prev)[target] != INVALID || (*_next)[target] != INVALID) { |
751 | 751 |
if ((*_next)[target] == INVALID) { |
752 | 752 |
_last[(*_bucket)[target]] = (*_prev)[target]; |
753 | 753 |
new_target = (*_prev)[target]; |
754 | 754 |
} else { |
755 |
_prev->set((*_next)[target], (*_prev)[target]); |
|
755 |
(*_prev)[(*_next)[target]] = (*_prev)[target]; |
|
756 | 756 |
new_target = (*_next)[target]; |
757 | 757 |
} |
758 | 758 |
if ((*_prev)[target] == INVALID) { |
759 | 759 |
_first[(*_bucket)[target]] = (*_next)[target]; |
760 | 760 |
} else { |
761 |
_next->set((*_prev)[target], (*_next)[target]); |
|
761 |
(*_next)[(*_prev)[target]] = (*_next)[target]; |
|
762 | 762 |
} |
763 | 763 |
} else { |
764 | 764 |
_sets.back().pop_back(); |
765 | 765 |
if (_sets.back().empty()) { |
766 | 766 |
_sets.pop_back(); |
767 | 767 |
if (_sets.empty()) |
768 | 768 |
break; |
769 | 769 |
for (std::list<int>::iterator it = _sets.back().begin(); |
770 | 770 |
it != _sets.back().end(); ++it) { |
771 | 771 |
_dormant[*it] = false; |
772 | 772 |
} |
773 | 773 |
} |
774 | 774 |
new_target = _last[_sets.back().back()]; |
775 | 775 |
} |
776 | 776 |
|
777 |
_bucket->set(target, 0); |
|
777 |
(*_bucket)[target] = 0; |
|
778 | 778 |
|
779 |
_source_set->set(target, true); |
|
779 |
(*_source_set)[target] = true; |
|
780 | 780 |
for (InArcIt a(_graph, target); a != INVALID; ++a) { |
781 | 781 |
Value rem = (*_capacity)[a] - (*_flow)[a]; |
782 | 782 |
if (!_tolerance.positive(rem)) continue; |
783 | 783 |
Node v = _graph.source(a); |
784 | 784 |
if (!(*_active)[v] && !(*_source_set)[v]) { |
785 | 785 |
activate(v); |
786 | 786 |
} |
787 |
_excess->set(v, (*_excess)[v] + rem); |
|
788 |
_flow->set(a, (*_capacity)[a]); |
|
787 |
(*_excess)[v] += rem; |
|
788 |
(*_flow)[a] = (*_capacity)[a]; |
|
789 | 789 |
} |
790 | 790 |
|
791 | 791 |
for (OutArcIt a(_graph, target); a != INVALID; ++a) { |
792 | 792 |
Value rem = (*_flow)[a]; |
793 | 793 |
if (!_tolerance.positive(rem)) continue; |
794 | 794 |
Node v = _graph.target(a); |
795 | 795 |
if (!(*_active)[v] && !(*_source_set)[v]) { |
796 | 796 |
activate(v); |
797 | 797 |
} |
798 |
_excess->set(v, (*_excess)[v] + rem); |
|
799 |
_flow->set(a, 0); |
|
798 |
(*_excess)[v] += rem; |
|
799 |
(*_flow)[a] = 0; |
|
800 | 800 |
} |
801 | 801 |
|
802 | 802 |
target = new_target; |
803 | 803 |
if ((*_active)[target]) { |
804 | 804 |
deactivate(target); |
805 | 805 |
} |
806 | 806 |
|
807 | 807 |
_highest = _sets.back().begin(); |
808 | 808 |
while (_highest != _sets.back().end() && |
809 | 809 |
!(*_active)[_first[*_highest]]) { |
810 | 810 |
++_highest; |
811 | 811 |
} |
812 | 812 |
} |
813 | 813 |
} |
814 | 814 |
} |
815 | 815 |
|
816 | 816 |
public: |
817 | 817 |
|
818 | 818 |
/// \name Execution control |
819 | 819 |
/// The simplest way to execute the algorithm is to use |
820 | 820 |
/// one of the member functions called \ref run(). |
821 | 821 |
/// \n |
822 | 822 |
/// If you need more control on the execution, |
823 | 823 |
/// first you must call \ref init(), then the \ref calculateIn() or |
824 | 824 |
/// \ref calculateOut() functions. |
825 | 825 |
|
826 | 826 |
/// @{ |
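A hedged sketch, not part of the changeset, of the step-by-step interface described above (a single run() call is the simpler alternative); minCutValue() is assumed from the query part of this class, which lies outside this excerpt, and the small digraph is made up.

#include <lemon/list_graph.h>
#include <lemon/hao_orlin.h>
#include <iostream>
using namespace lemon;

int main() {
  ListDigraph g;
  ListDigraph::Node u = g.addNode(), v = g.addNode(), w = g.addNode();
  ListDigraph::ArcMap<int> cap(g);
  cap[g.addArc(u, v)] = 2;
  cap[g.addArc(v, w)] = 1;
  cap[g.addArc(w, u)] = 3;

  HaoOrlin<ListDigraph> ho(g, cap);
  ho.init(u);          // u is the fixed source node
  ho.calculateOut();   // best cut with u on the source side
  ho.calculateIn();    // best cut with u on the sink side
  std::cout << "minimum cut value: " << ho.minCutValue() << std::endl;
  return 0;
}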
827 | 827 |
|
828 | 828 |
/// \brief Initializes the internal data structures. |
829 | 829 |
/// |
830 | 830 |
/// Initializes the internal data structures. It creates |
831 | 831 |
/// the maps, residual graph adaptors and some bucket structures |
832 | 832 |
/// for the algorithm. |
833 | 833 |
void init() { |
834 | 834 |
init(NodeIt(_graph)); |
835 | 835 |
} |
836 | 836 |
|
837 | 837 |
/// \brief Initializes the internal data structures. |
838 | 838 |
/// |
839 | 839 |
/// Initializes the internal data structures. It creates |
840 | 840 |
/// the maps, residual graph adaptor and some bucket structures |
841 | 841 |
/// for the algorithm. Node \c source is used as the push-relabel |
842 | 842 |
/// algorithm's source. |
843 | 843 |
void init(const Node& source) { |
844 | 844 |
_source = source; |
845 | 845 |
|
846 | 846 |
_node_num = countNodes(_graph); |
847 | 847 |
|
848 | 848 |
_first.resize(_node_num); |
849 | 849 |
_last.resize(_node_num); |
850 | 850 |
|
851 | 851 |
_dormant.resize(_node_num); |
852 | 852 |
|
853 | 853 |
if (!_flow) { |
854 | 854 |
_flow = new FlowMap(_graph); |
855 | 855 |
} |
856 | 856 |
if (!_next) { |
857 | 857 |
_next = new typename Digraph::template NodeMap<Node>(_graph); |
858 | 858 |
} |
859 | 859 |
if (!_prev) { |
860 | 860 |
_prev = new typename Digraph::template NodeMap<Node>(_graph); |
861 | 861 |
} |
862 | 862 |
if (!_active) { |
863 | 863 |
_active = new typename Digraph::template NodeMap<bool>(_graph); |
864 | 864 |
} |
865 | 865 |
if (!_bucket) { |
866 | 866 |
_bucket = new typename Digraph::template NodeMap<int>(_graph); |
867 | 867 |
} |
868 | 868 |
if (!_excess) { |
869 | 869 |
_excess = new ExcessMap(_graph); |
870 | 870 |
} |
871 | 871 |
if (!_source_set) { |
872 | 872 |
_source_set = new SourceSetMap(_graph); |
873 | 873 |
} |
874 | 874 |
if (!_min_cut_map) { |
875 | 875 |
_min_cut_map = new MinCutMap(_graph); |
876 | 876 |
} |
877 | 877 |
|
878 | 878 |
_min_cut = std::numeric_limits<Value>::max(); |
879 | 879 |
} |
880 | 880 |
|
881 | 881 |
|
882 | 882 |
/// \brief Calculates a minimum cut with \f$ source \f$ on the |
883 | 883 |
/// source-side. |
884 | 884 |
/// |
885 | 885 |
/// Calculates a minimum cut with \f$ source \f$ on the |
886 | 886 |
/// source-side (i.e. a set \f$ X\subsetneq V \f$ with |
887 | 887 |
/// \f$ source \in X \f$ and minimal outgoing capacity). |
888 | 888 |
void calculateOut() { |
889 | 889 |
findMinCutOut(); |
890 | 890 |
} |
891 | 891 |
|
892 | 892 |
/// \brief Calculates a minimum cut with \f$ source \f$ on the |
893 | 893 |
/// target-side. |
894 | 894 |
/// |
895 | 895 |
/// Calculates a minimum cut with \f$ source \f$ on the |
... | ... |
@@ -189,423 +189,423 @@ |
189 | 189 |
for (OutArcIt b(_graph, w); b != INVALID; ++b) { |
190 | 190 |
Node x = _graph.target(b); |
191 | 191 |
if ((*_status)[x] == MATCHED) { |
192 | 192 |
extendOnArc(b); |
193 | 193 |
} else if ((*_status)[x] == UNMATCHED) { |
194 | 194 |
augmentOnArc(b); |
195 | 195 |
return; |
196 | 196 |
} |
197 | 197 |
} |
198 | 198 |
} |
199 | 199 |
} |
200 | 200 |
} |
201 | 201 |
} |
202 | 202 |
|
203 | 203 |
void processSparse(const Node& n) { |
204 | 204 |
_process = _last = 0; |
205 | 205 |
_node_queue[_last++] = n; |
206 | 206 |
while (_process != _last) { |
207 | 207 |
Node u = _node_queue[_process++]; |
208 | 208 |
for (OutArcIt a(_graph, u); a != INVALID; ++a) { |
209 | 209 |
Node v = _graph.target(a); |
210 | 210 |
|
211 | 211 |
if ((*_status)[v] == EVEN) { |
212 | 212 |
if (_blossom_set->find(u) != _blossom_set->find(v)) { |
213 | 213 |
shrinkOnEdge(a); |
214 | 214 |
} |
215 | 215 |
} else if ((*_status)[v] == MATCHED) { |
216 | 216 |
extendOnArc(a); |
217 | 217 |
} else if ((*_status)[v] == UNMATCHED) { |
218 | 218 |
augmentOnArc(a); |
219 | 219 |
return; |
220 | 220 |
} |
221 | 221 |
} |
222 | 222 |
} |
223 | 223 |
} |
224 | 224 |
|
225 | 225 |
void shrinkOnEdge(const Edge& e) { |
226 | 226 |
Node nca = INVALID; |
227 | 227 |
|
228 | 228 |
{ |
229 | 229 |
std::set<Node> left_set, right_set; |
230 | 230 |
|
231 | 231 |
Node left = (*_blossom_rep)[_blossom_set->find(_graph.u(e))]; |
232 | 232 |
left_set.insert(left); |
233 | 233 |
|
234 | 234 |
Node right = (*_blossom_rep)[_blossom_set->find(_graph.v(e))]; |
235 | 235 |
right_set.insert(right); |
236 | 236 |
|
237 | 237 |
while (true) { |
238 | 238 |
if ((*_matching)[left] == INVALID) break; |
239 | 239 |
left = _graph.target((*_matching)[left]); |
240 | 240 |
left = (*_blossom_rep)[_blossom_set-> |
241 | 241 |
find(_graph.target((*_ear)[left]))]; |
242 | 242 |
if (right_set.find(left) != right_set.end()) { |
243 | 243 |
nca = left; |
244 | 244 |
break; |
245 | 245 |
} |
246 | 246 |
left_set.insert(left); |
247 | 247 |
|
248 | 248 |
if ((*_matching)[right] == INVALID) break; |
249 | 249 |
right = _graph.target((*_matching)[right]); |
250 | 250 |
right = (*_blossom_rep)[_blossom_set-> |
251 | 251 |
find(_graph.target((*_ear)[right]))]; |
252 | 252 |
if (left_set.find(right) != left_set.end()) { |
253 | 253 |
nca = right; |
254 | 254 |
break; |
255 | 255 |
} |
256 | 256 |
right_set.insert(right); |
257 | 257 |
} |
258 | 258 |
|
259 | 259 |
if (nca == INVALID) { |
260 | 260 |
if ((*_matching)[left] == INVALID) { |
261 | 261 |
nca = right; |
262 | 262 |
while (left_set.find(nca) == left_set.end()) { |
263 | 263 |
nca = _graph.target((*_matching)[nca]); |
264 | 264 |
nca =(*_blossom_rep)[_blossom_set-> |
265 | 265 |
find(_graph.target((*_ear)[nca]))]; |
266 | 266 |
} |
267 | 267 |
} else { |
268 | 268 |
nca = left; |
269 | 269 |
while (right_set.find(nca) == right_set.end()) { |
270 | 270 |
nca = _graph.target((*_matching)[nca]); |
271 | 271 |
nca = (*_blossom_rep)[_blossom_set-> |
272 | 272 |
find(_graph.target((*_ear)[nca]))]; |
273 | 273 |
} |
274 | 274 |
} |
275 | 275 |
} |
276 | 276 |
} |
277 | 277 |
|
278 | 278 |
{ |
279 | 279 |
|
280 | 280 |
Node node = _graph.u(e); |
281 | 281 |
Arc arc = _graph.direct(e, true); |
282 | 282 |
Node base = (*_blossom_rep)[_blossom_set->find(node)]; |
283 | 283 |
|
284 | 284 |
while (base != nca) { |
285 |
_ear->set(node, arc); |
|
285 |
(*_ear)[node] = arc; |
|
286 | 286 |
|
287 | 287 |
Node n = node; |
288 | 288 |
while (n != base) { |
289 | 289 |
n = _graph.target((*_matching)[n]); |
290 | 290 |
Arc a = (*_ear)[n]; |
291 | 291 |
n = _graph.target(a); |
292 |
_ear->set(n, _graph.oppositeArc(a)); |
|
292 |
(*_ear)[n] = _graph.oppositeArc(a); |
|
293 | 293 |
} |
294 | 294 |
node = _graph.target((*_matching)[base]); |
295 | 295 |
_tree_set->erase(base); |
296 | 296 |
_tree_set->erase(node); |
297 | 297 |
_blossom_set->insert(node, _blossom_set->find(base)); |
298 |
_status->set(node, EVEN); |
|
298 |
(*_status)[node] = EVEN; |
|
299 | 299 |
_node_queue[_last++] = node; |
300 | 300 |
arc = _graph.oppositeArc((*_ear)[node]); |
301 | 301 |
node = _graph.target((*_ear)[node]); |
302 | 302 |
base = (*_blossom_rep)[_blossom_set->find(node)]; |
303 | 303 |
_blossom_set->join(_graph.target(arc), base); |
304 | 304 |
} |
305 | 305 |
} |
306 | 306 |
|
307 |
_blossom_rep->set(_blossom_set->find(nca), nca); |
|
307 |
(*_blossom_rep)[_blossom_set->find(nca)] = nca; |
|
308 | 308 |
|
309 | 309 |
{ |
310 | 310 |
|
311 | 311 |
Node node = _graph.v(e); |
312 | 312 |
Arc arc = _graph.direct(e, false); |
313 | 313 |
Node base = (*_blossom_rep)[_blossom_set->find(node)]; |
314 | 314 |
|
315 | 315 |
while (base != nca) { |
316 |
_ear->set(node, arc); |
|
316 |
(*_ear)[node] = arc; |
|
317 | 317 |
|
318 | 318 |
Node n = node; |
319 | 319 |
while (n != base) { |
320 | 320 |
n = _graph.target((*_matching)[n]); |
321 | 321 |
Arc a = (*_ear)[n]; |
322 | 322 |
n = _graph.target(a); |
323 |
_ear->set(n, _graph.oppositeArc(a)); |
|
323 |
(*_ear)[n] = _graph.oppositeArc(a); |
|
324 | 324 |
} |
325 | 325 |
node = _graph.target((*_matching)[base]); |
326 | 326 |
_tree_set->erase(base); |
327 | 327 |
_tree_set->erase(node); |
328 | 328 |
_blossom_set->insert(node, _blossom_set->find(base)); |
329 |
_status->set(node, EVEN); |
|
329 |
(*_status)[node] = EVEN; |
|
330 | 330 |
_node_queue[_last++] = node; |
331 | 331 |
arc = _graph.oppositeArc((*_ear)[node]); |
332 | 332 |
node = _graph.target((*_ear)[node]); |
333 | 333 |
base = (*_blossom_rep)[_blossom_set->find(node)]; |
334 | 334 |
_blossom_set->join(_graph.target(arc), base); |
335 | 335 |
} |
336 | 336 |
} |
337 | 337 |
|
338 |
_blossom_rep->set(_blossom_set->find(nca), nca); |
|
338 |
(*_blossom_rep)[_blossom_set->find(nca)] = nca; |
|
339 | 339 |
} |
340 | 340 |
|
341 | 341 |
|
342 | 342 |
|
343 | 343 |
void extendOnArc(const Arc& a) { |
344 | 344 |
Node base = _graph.source(a); |
345 | 345 |
Node odd = _graph.target(a); |
346 | 346 |
|
347 |
_ear->set(odd, _graph.oppositeArc(a)); |
|
347 |
(*_ear)[odd] = _graph.oppositeArc(a); |
|
348 | 348 |
Node even = _graph.target((*_matching)[odd]); |
349 |
_blossom_rep->set(_blossom_set->insert(even), even); |
|
350 |
_status->set(odd, ODD); |
|
351 |
_status->set(even, EVEN); |
|
349 |
(*_blossom_rep)[_blossom_set->insert(even)] = even; |
|
350 |
(*_status)[odd] = ODD; |
|
351 |
(*_status)[even] = EVEN; |
|
352 | 352 |
int tree = _tree_set->find((*_blossom_rep)[_blossom_set->find(base)]); |
353 | 353 |
_tree_set->insert(odd, tree); |
354 | 354 |
_tree_set->insert(even, tree); |
355 | 355 |
_node_queue[_last++] = even; |
356 | 356 |
|
357 | 357 |
} |
358 | 358 |
|
359 | 359 |
void augmentOnArc(const Arc& a) { |
360 | 360 |
Node even = _graph.source(a); |
361 | 361 |
Node odd = _graph.target(a); |
362 | 362 |
|
363 | 363 |
int tree = _tree_set->find((*_blossom_rep)[_blossom_set->find(even)]); |
364 | 364 |
|
365 |
_matching->set(odd, _graph.oppositeArc(a)); |
|
366 |
_status->set(odd, MATCHED); |
|
365 |
(*_matching)[odd] = _graph.oppositeArc(a); |
|
366 |
(*_status)[odd] = MATCHED; |
|
367 | 367 |
|
368 | 368 |
Arc arc = (*_matching)[even]; |
369 |
_matching->set(even, a); |
|
369 |
(*_matching)[even] = a; |
|
370 | 370 |
|
371 | 371 |
while (arc != INVALID) { |
372 | 372 |
odd = _graph.target(arc); |
373 | 373 |
arc = (*_ear)[odd]; |
374 | 374 |
even = _graph.target(arc); |
375 |
_matching->set(odd, arc); |
|
375 |
(*_matching)[odd] = arc; |
|
376 | 376 |
arc = (*_matching)[even]; |
377 |
_matching->set(even, _graph.oppositeArc((*_matching)[odd])); |
|
377 |
(*_matching)[even] = _graph.oppositeArc((*_matching)[odd]); |
|
378 | 378 |
} |
379 | 379 |
|
380 | 380 |
for (typename TreeSet::ItemIt it(*_tree_set, tree); |
381 | 381 |
it != INVALID; ++it) { |
382 | 382 |
if ((*_status)[it] == ODD) { |
383 |
_status->set(it, MATCHED); |
|
383 |
(*_status)[it] = MATCHED; |
|
384 | 384 |
} else { |
385 | 385 |
int blossom = _blossom_set->find(it); |
386 | 386 |
for (typename BlossomSet::ItemIt jt(*_blossom_set, blossom); |
387 | 387 |
jt != INVALID; ++jt) { |
388 |
_status->set(jt, MATCHED); |
|
388 |
(*_status)[jt] = MATCHED; |
|
389 | 389 |
} |
390 | 390 |
_blossom_set->eraseClass(blossom); |
391 | 391 |
} |
392 | 392 |
} |
393 | 393 |
_tree_set->eraseClass(tree); |
394 | 394 |
|
395 | 395 |
} |
396 | 396 |
|
397 | 397 |
public: |
398 | 398 |
|
399 | 399 |
/// \brief Constructor |
400 | 400 |
/// |
401 | 401 |
/// Constructor. |
402 | 402 |
MaxMatching(const Graph& graph) |
403 | 403 |
: _graph(graph), _matching(0), _status(0), _ear(0), |
404 | 404 |
_blossom_set_index(0), _blossom_set(0), _blossom_rep(0), |
405 | 405 |
_tree_set_index(0), _tree_set(0) {} |
406 | 406 |
|
407 | 407 |
~MaxMatching() { |
408 | 408 |
destroyStructures(); |
409 | 409 |
} |
410 | 410 |
|
411 | 411 |
/// \name Execution control |
412 | 412 |
/// The simplest way to execute the algorithm is to use the |
413 | 413 |
/// \c run() member function. |
414 | 414 |
/// \n |
415 | 415 |
|
416 | 416 |
/// If you need better control on the execution, you must call |
417 | 417 |
/// \ref init(), \ref greedyInit() or \ref matchingInit() |
418 | 418 |
/// functions first, then you can start the algorithm with the \ref |
419 | 419 |
/// startSparse() or startDense() functions. |
420 | 420 |
|
421 | 421 |
///@{ |
422 | 422 |
|
423 | 423 |
/// \brief Sets the actual matching to the empty matching. |
424 | 424 |
/// |
425 | 425 |
/// Sets the actual matching to the empty matching. |
426 | 426 |
/// |
427 | 427 |
void init() { |
428 | 428 |
createStructures(); |
429 | 429 |
for(NodeIt n(_graph); n != INVALID; ++n) { |
430 |
_matching->set(n, INVALID); |
|
431 |
_status->set(n, UNMATCHED); |
|
430 |
(*_matching)[n] = INVALID; |
|
431 |
(*_status)[n] = UNMATCHED; |
|
432 | 432 |
} |
433 | 433 |
} |
434 | 434 |
|
435 | 435 |
///\brief Finds an initial matching in a greedy way |
436 | 436 |
/// |
437 | 437 |
///It finds an initial matching in a greedy way. |
438 | 438 |
void greedyInit() { |
439 | 439 |
createStructures(); |
440 | 440 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
441 |
_matching->set(n, INVALID); |
|
442 |
_status->set(n, UNMATCHED); |
|
441 |
(*_matching)[n] = INVALID; |
|
442 |
(*_status)[n] = UNMATCHED; |
|
443 | 443 |
} |
444 | 444 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
445 | 445 |
if ((*_matching)[n] == INVALID) { |
446 | 446 |
for (OutArcIt a(_graph, n); a != INVALID ; ++a) { |
447 | 447 |
Node v = _graph.target(a); |
448 | 448 |
if ((*_matching)[v] == INVALID && v != n) { |
449 |
_matching->set(n, a); |
|
450 |
_status->set(n, MATCHED); |
|
451 |
_matching->set(v, _graph.oppositeArc(a)); |
|
452 |
_status->set(v, MATCHED); |
|
449 |
(*_matching)[n] = a; |
|
450 |
(*_status)[n] = MATCHED; |
|
451 |
(*_matching)[v] = _graph.oppositeArc(a); |
|
452 |
(*_status)[v] = MATCHED; |
|
453 | 453 |
break; |
454 | 454 |
} |
455 | 455 |
} |
456 | 456 |
} |
457 | 457 |
} |
458 | 458 |
} |
459 | 459 |
|
460 | 460 |
|
461 | 461 |
/// \brief Initialize the matching from a map containing a matching. |
462 | 462 |
/// |
463 | 463 |
/// Initialize the matching from a \c bool valued \c Edge map. This |
464 | 464 |
/// map must have the property that there are no two incident edges |
465 | 465 |
/// with true value, i.e. it contains a matching. |
466 | 466 |
/// \return \c true if the map contains a matching. |
467 | 467 |
template <typename MatchingMap> |
468 | 468 |
bool matchingInit(const MatchingMap& matching) { |
469 | 469 |
createStructures(); |
470 | 470 |
|
471 | 471 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
472 |
_matching->set(n, INVALID); |
|
473 |
_status->set(n, UNMATCHED); |
|
472 |
(*_matching)[n] = INVALID; |
|
473 |
(*_status)[n] = UNMATCHED; |
|
474 | 474 |
} |
475 | 475 |
for(EdgeIt e(_graph); e!=INVALID; ++e) { |
476 | 476 |
if (matching[e]) { |
477 | 477 |
|
478 | 478 |
Node u = _graph.u(e); |
479 | 479 |
if ((*_matching)[u] != INVALID) return false; |
480 |
_matching->set(u, _graph.direct(e, true)); |
|
481 |
_status->set(u, MATCHED); |
|
480 |
(*_matching)[u] = _graph.direct(e, true); |
|
481 |
(*_status)[u] = MATCHED; |
|
482 | 482 |
|
483 | 483 |
Node v = _graph.v(e); |
484 | 484 |
if ((*_matching)[v] != INVALID) return false; |
485 |
_matching->set(v, _graph.direct(e, false)); |
|
486 |
_status->set(v, MATCHED); |
|
485 |
(*_matching)[v] = _graph.direct(e, false); |
|
486 |
(*_status)[v] = MATCHED; |
|
487 | 487 |
} |
488 | 488 |
} |
489 | 489 |
return true; |
490 | 490 |
} |
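For orientation only (not part of the changeset), a minimal usage sketch of matchingInit(): lemon::ListGraph and its EdgeMap are assumed stand-ins for the Graph template parameter, and the snippet is meant to sit inside a function body with the appropriate LEMON headers included.
// Hedged sketch: build a bool edge map in which no two true edges share a node.
lemon::ListGraph g;
lemon::ListGraph::Node a = g.addNode(), b = g.addNode(), c = g.addNode();
lemon::ListGraph::Edge ab = g.addEdge(a, b);
g.addEdge(b, c);
lemon::ListGraph::EdgeMap<bool> initial(g, false);
initial[ab] = true;                         // a valid matching
lemon::MaxMatching<lemon::ListGraph> mm(g);
bool ok = mm.matchingInit(initial);         // false if the map is not a matching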
491 | 491 |
|
492 | 492 |
/// \brief Starts Edmonds' algorithm |
493 | 493 |
/// |
494 | 494 |
/// It runs the original Edmonds' algorithm. |
495 | 495 |
void startSparse() { |
496 | 496 |
for(NodeIt n(_graph); n != INVALID; ++n) { |
497 | 497 |
if ((*_status)[n] == UNMATCHED) { |
498 | 498 |
(*_blossom_rep)[_blossom_set->insert(n)] = n; |
499 | 499 |
_tree_set->insert(n); |
500 |
_status->set(n, EVEN); |
|
500 |
(*_status)[n] = EVEN; |
|
501 | 501 |
processSparse(n); |
502 | 502 |
} |
503 | 503 |
} |
504 | 504 |
} |
505 | 505 |
|
506 | 506 |
/// \brief Starts Edmonds' algorithm. |
507 | 507 |
/// |
508 | 508 |
/// It runs Edmonds' algorithm with a heuristic of postponing |
509 | 509 |
/// shrinks, therefore resulting in a faster algorithm for dense graphs. |
510 | 510 |
void startDense() { |
511 | 511 |
for(NodeIt n(_graph); n != INVALID; ++n) { |
512 | 512 |
if ((*_status)[n] == UNMATCHED) { |
513 | 513 |
(*_blossom_rep)[_blossom_set->insert(n)] = n; |
514 | 514 |
_tree_set->insert(n); |
515 |
_status->set(n, EVEN); |
|
515 |
(*_status)[n] = EVEN; |
|
516 | 516 |
processDense(n); |
517 | 517 |
} |
518 | 518 |
} |
519 | 519 |
} |
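If finer control is wanted than run() (defined just below) provides, the two explicit sequences sketched here mirror its branches; mm is assumed to be a MaxMatching instance as in the earlier sketch, and nothing here adds new API.
// Either the sparse branch: greedy initial matching, then the original Edmonds algorithm,
mm.greedyInit();
mm.startSparse();
// or the dense branch: empty initial matching, then the postponed-shrink variant.
mm.init();
mm.startDense();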
520 | 520 |
|
521 | 521 |
|
522 | 522 |
/// \brief Runs Edmonds' algorithm |
523 | 523 |
/// |
524 | 524 |
/// Runs Edmonds' algorithm for sparse graphs (<tt>m<2*n</tt>) |
525 | 525 |
/// or Edmonds' algorithm with a heuristic of |
526 | 526 |
/// postponing shrinks for dense graphs. |
527 | 527 |
void run() { |
528 | 528 |
if (countEdges(_graph) < 2 * countNodes(_graph)) { |
529 | 529 |
greedyInit(); |
530 | 530 |
startSparse(); |
531 | 531 |
} else { |
532 | 532 |
init(); |
533 | 533 |
startDense(); |
534 | 534 |
} |
535 | 535 |
} |
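A minimal end-to-end sketch (ListGraph is an assumed graph type): run() picks the sparse or dense start automatically based on the edge/node ratio, after which the primal queries can be used.
lemon::ListGraph g;
lemon::ListGraph::Node u = g.addNode(), v = g.addNode();
lemon::ListGraph::Edge e = g.addEdge(u, v);
lemon::MaxMatching<lemon::ListGraph> mm(g);
mm.run();                                   // greedyInit()+startSparse() or init()+startDense()
int size = mm.matchingSize();               // number of matched edges
bool in_matching = mm.matching(e);          // true iff e is a matching edge
lemon::ListGraph::Node partner = mm.mate(u);  // INVALID if u is unmatched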
536 | 536 |
|
537 | 537 |
/// @} |
538 | 538 |
|
539 | 539 |
/// \name Primal solution |
540 | 540 |
/// Functions to get the primal solution, i.e. the matching. |
541 | 541 |
|
542 | 542 |
/// @{ |
543 | 543 |
|
544 | 544 |
///\brief Returns the size of the current matching. |
545 | 545 |
/// |
546 | 546 |
///Returns the size of the current matching. After \ref |
547 | 547 |
///run() it returns the size of the maximum matching in the graph. |
548 | 548 |
int matchingSize() const { |
549 | 549 |
int size = 0; |
550 | 550 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
551 | 551 |
if ((*_matching)[n] != INVALID) { |
552 | 552 |
++size; |
553 | 553 |
} |
554 | 554 |
} |
555 | 555 |
return size / 2; |
556 | 556 |
} |
557 | 557 |
|
558 | 558 |
/// \brief Returns true when the edge is in the matching. |
559 | 559 |
/// |
560 | 560 |
/// Returns true when the edge is in the matching. |
561 | 561 |
bool matching(const Edge& edge) const { |
562 | 562 |
return edge == (*_matching)[_graph.u(edge)]; |
563 | 563 |
} |
564 | 564 |
|
565 | 565 |
/// \brief Returns the matching edge incident to the given node. |
566 | 566 |
/// |
567 | 567 |
/// Returns the matching edge of a \c node in the actual matching or |
568 | 568 |
/// INVALID if the \c node is not covered by the actual matching. |
569 | 569 |
Arc matching(const Node& n) const { |
570 | 570 |
return (*_matching)[n]; |
571 | 571 |
} |
572 | 572 |
|
573 | 573 |
///\brief Returns the mate of a node in the actual matching. |
574 | 574 |
/// |
575 | 575 |
///Returns the mate of a \c node in the actual matching or |
576 | 576 |
///INVALID if the \c node is not covered by the actual matching. |
577 | 577 |
Node mate(const Node& n) const { |
578 | 578 |
return (*_matching)[n] != INVALID ? |
579 | 579 |
_graph.target((*_matching)[n]) : INVALID; |
580 | 580 |
} |
581 | 581 |
|
582 | 582 |
/// @} |
583 | 583 |
|
584 | 584 |
/// \name Dual solution |
585 | 585 |
/// Functions to get the dual solution, i.e. the decomposition. |
586 | 586 |
|
587 | 587 |
/// @{ |
588 | 588 |
|
589 | 589 |
/// \brief Returns the class of the node in the Edmonds-Gallai |
590 | 590 |
/// decomposition. |
591 | 591 |
/// |
592 | 592 |
/// Returns the class of the node in the Edmonds-Gallai |
593 | 593 |
/// decomposition. |
594 | 594 |
Status decomposition(const Node& n) const { |
595 | 595 |
return (*_status)[n]; |
596 | 596 |
} |
597 | 597 |
|
598 | 598 |
/// \brief Returns true when the node is in the barrier. |
599 | 599 |
/// |
600 | 600 |
/// Returns true when the node is in the barrier. |
601 | 601 |
bool barrier(const Node& n) const { |
602 | 602 |
return (*_status)[n] == ODD; |
603 | 603 |
} |
604 | 604 |
|
605 | 605 |
/// @} |
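A short follow-up sketch of reading the Edmonds-Gallai decomposition; it assumes the g and mm of the previous sketch, after mm.run() has been called.
int barrier_size = 0;
for (lemon::ListGraph::NodeIt n(g); n != lemon::INVALID; ++n) {
  if (mm.barrier(n)) ++barrier_size;        // same as decomposition(n) == ODD
}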
606 | 606 |
|
607 | 607 |
}; |
608 | 608 |
|
609 | 609 |
/// \ingroup matching |
610 | 610 |
/// |
611 | 611 |
/// \brief Weighted matching in general graphs |
... | ... |
@@ -1455,311 +1455,311 @@ |
1455 | 1455 |
for (int i = 0; i < int(subblossoms.size()); ++i) { |
1456 | 1456 |
if (subblossoms[i] == b) ib = i; |
1457 | 1457 |
if (subblossoms[i] == d) id = i; |
1458 | 1458 |
|
1459 | 1459 |
(*_blossom_data)[subblossoms[i]].offset = offset; |
1460 | 1460 |
if (!_blossom_set->trivial(subblossoms[i])) { |
1461 | 1461 |
(*_blossom_data)[subblossoms[i]].pot -= 2 * offset; |
1462 | 1462 |
} |
1463 | 1463 |
if (_blossom_set->classPrio(subblossoms[i]) != |
1464 | 1464 |
std::numeric_limits<Value>::max()) { |
1465 | 1465 |
_delta2->push(subblossoms[i], |
1466 | 1466 |
_blossom_set->classPrio(subblossoms[i]) - |
1467 | 1467 |
(*_blossom_data)[subblossoms[i]].offset); |
1468 | 1468 |
} |
1469 | 1469 |
} |
1470 | 1470 |
|
1471 | 1471 |
if (id > ib ? ((id - ib) % 2 == 0) : ((ib - id) % 2 == 1)) { |
1472 | 1472 |
for (int i = (id + 1) % subblossoms.size(); |
1473 | 1473 |
i != ib; i = (i + 2) % subblossoms.size()) { |
1474 | 1474 |
int sb = subblossoms[i]; |
1475 | 1475 |
int tb = subblossoms[(i + 1) % subblossoms.size()]; |
1476 | 1476 |
(*_blossom_data)[sb].next = |
1477 | 1477 |
_graph.oppositeArc((*_blossom_data)[tb].next); |
1478 | 1478 |
} |
1479 | 1479 |
|
1480 | 1480 |
for (int i = ib; i != id; i = (i + 2) % subblossoms.size()) { |
1481 | 1481 |
int sb = subblossoms[i]; |
1482 | 1482 |
int tb = subblossoms[(i + 1) % subblossoms.size()]; |
1483 | 1483 |
int ub = subblossoms[(i + 2) % subblossoms.size()]; |
1484 | 1484 |
|
1485 | 1485 |
(*_blossom_data)[sb].status = ODD; |
1486 | 1486 |
matchedToOdd(sb); |
1487 | 1487 |
_tree_set->insert(sb, tree); |
1488 | 1488 |
(*_blossom_data)[sb].pred = pred; |
1489 | 1489 |
(*_blossom_data)[sb].next = |
1490 | 1490 |
_graph.oppositeArc((*_blossom_data)[tb].next); |
1491 | 1491 |
|
1492 | 1492 |
pred = (*_blossom_data)[ub].next; |
1493 | 1493 |
|
1494 | 1494 |
(*_blossom_data)[tb].status = EVEN; |
1495 | 1495 |
matchedToEven(tb, tree); |
1496 | 1496 |
_tree_set->insert(tb, tree); |
1497 | 1497 |
(*_blossom_data)[tb].pred = (*_blossom_data)[tb].next; |
1498 | 1498 |
} |
1499 | 1499 |
|
1500 | 1500 |
(*_blossom_data)[subblossoms[id]].status = ODD; |
1501 | 1501 |
matchedToOdd(subblossoms[id]); |
1502 | 1502 |
_tree_set->insert(subblossoms[id], tree); |
1503 | 1503 |
(*_blossom_data)[subblossoms[id]].next = next; |
1504 | 1504 |
(*_blossom_data)[subblossoms[id]].pred = pred; |
1505 | 1505 |
|
1506 | 1506 |
} else { |
1507 | 1507 |
|
1508 | 1508 |
for (int i = (ib + 1) % subblossoms.size(); |
1509 | 1509 |
i != id; i = (i + 2) % subblossoms.size()) { |
1510 | 1510 |
int sb = subblossoms[i]; |
1511 | 1511 |
int tb = subblossoms[(i + 1) % subblossoms.size()]; |
1512 | 1512 |
(*_blossom_data)[sb].next = |
1513 | 1513 |
_graph.oppositeArc((*_blossom_data)[tb].next); |
1514 | 1514 |
} |
1515 | 1515 |
|
1516 | 1516 |
for (int i = id; i != ib; i = (i + 2) % subblossoms.size()) { |
1517 | 1517 |
int sb = subblossoms[i]; |
1518 | 1518 |
int tb = subblossoms[(i + 1) % subblossoms.size()]; |
1519 | 1519 |
int ub = subblossoms[(i + 2) % subblossoms.size()]; |
1520 | 1520 |
|
1521 | 1521 |
(*_blossom_data)[sb].status = ODD; |
1522 | 1522 |
matchedToOdd(sb); |
1523 | 1523 |
_tree_set->insert(sb, tree); |
1524 | 1524 |
(*_blossom_data)[sb].next = next; |
1525 | 1525 |
(*_blossom_data)[sb].pred = |
1526 | 1526 |
_graph.oppositeArc((*_blossom_data)[tb].next); |
1527 | 1527 |
|
1528 | 1528 |
(*_blossom_data)[tb].status = EVEN; |
1529 | 1529 |
matchedToEven(tb, tree); |
1530 | 1530 |
_tree_set->insert(tb, tree); |
1531 | 1531 |
(*_blossom_data)[tb].pred = |
1532 | 1532 |
(*_blossom_data)[tb].next = |
1533 | 1533 |
_graph.oppositeArc((*_blossom_data)[ub].next); |
1534 | 1534 |
next = (*_blossom_data)[ub].next; |
1535 | 1535 |
} |
1536 | 1536 |
|
1537 | 1537 |
(*_blossom_data)[subblossoms[ib]].status = ODD; |
1538 | 1538 |
matchedToOdd(subblossoms[ib]); |
1539 | 1539 |
_tree_set->insert(subblossoms[ib], tree); |
1540 | 1540 |
(*_blossom_data)[subblossoms[ib]].next = next; |
1541 | 1541 |
(*_blossom_data)[subblossoms[ib]].pred = pred; |
1542 | 1542 |
} |
1543 | 1543 |
_tree_set->erase(blossom); |
1544 | 1544 |
} |
1545 | 1545 |
|
1546 | 1546 |
void extractBlossom(int blossom, const Node& base, const Arc& matching) { |
1547 | 1547 |
if (_blossom_set->trivial(blossom)) { |
1548 | 1548 |
int bi = (*_node_index)[base]; |
1549 | 1549 |
Value pot = (*_node_data)[bi].pot; |
1550 | 1550 |
|
1551 |
_matching->set(base, matching); |
|
1551 |
(*_matching)[base] = matching; |
|
1552 | 1552 |
_blossom_node_list.push_back(base); |
1553 |
_node_potential->set(base, pot); |
|
1553 |
(*_node_potential)[base] = pot; |
|
1554 | 1554 |
} else { |
1555 | 1555 |
|
1556 | 1556 |
Value pot = (*_blossom_data)[blossom].pot; |
1557 | 1557 |
int bn = _blossom_node_list.size(); |
1558 | 1558 |
|
1559 | 1559 |
std::vector<int> subblossoms; |
1560 | 1560 |
_blossom_set->split(blossom, std::back_inserter(subblossoms)); |
1561 | 1561 |
int b = _blossom_set->find(base); |
1562 | 1562 |
int ib = -1; |
1563 | 1563 |
for (int i = 0; i < int(subblossoms.size()); ++i) { |
1564 | 1564 |
if (subblossoms[i] == b) { ib = i; break; } |
1565 | 1565 |
} |
1566 | 1566 |
|
1567 | 1567 |
for (int i = 1; i < int(subblossoms.size()); i += 2) { |
1568 | 1568 |
int sb = subblossoms[(ib + i) % subblossoms.size()]; |
1569 | 1569 |
int tb = subblossoms[(ib + i + 1) % subblossoms.size()]; |
1570 | 1570 |
|
1571 | 1571 |
Arc m = (*_blossom_data)[tb].next; |
1572 | 1572 |
extractBlossom(sb, _graph.target(m), _graph.oppositeArc(m)); |
1573 | 1573 |
extractBlossom(tb, _graph.source(m), m); |
1574 | 1574 |
} |
1575 | 1575 |
extractBlossom(subblossoms[ib], base, matching); |
1576 | 1576 |
|
1577 | 1577 |
int en = _blossom_node_list.size(); |
1578 | 1578 |
|
1579 | 1579 |
_blossom_potential.push_back(BlossomVariable(bn, en, pot)); |
1580 | 1580 |
} |
1581 | 1581 |
} |
1582 | 1582 |
|
1583 | 1583 |
void extractMatching() { |
1584 | 1584 |
std::vector<int> blossoms; |
1585 | 1585 |
for (typename BlossomSet::ClassIt c(*_blossom_set); c != INVALID; ++c) { |
1586 | 1586 |
blossoms.push_back(c); |
1587 | 1587 |
} |
1588 | 1588 |
|
1589 | 1589 |
for (int i = 0; i < int(blossoms.size()); ++i) { |
1590 | 1590 |
if ((*_blossom_data)[blossoms[i]].status == MATCHED) { |
1591 | 1591 |
|
1592 | 1592 |
Value offset = (*_blossom_data)[blossoms[i]].offset; |
1593 | 1593 |
(*_blossom_data)[blossoms[i]].pot += 2 * offset; |
1594 | 1594 |
for (typename BlossomSet::ItemIt n(*_blossom_set, blossoms[i]); |
1595 | 1595 |
n != INVALID; ++n) { |
1596 | 1596 |
(*_node_data)[(*_node_index)[n]].pot -= offset; |
1597 | 1597 |
} |
1598 | 1598 |
|
1599 | 1599 |
Arc matching = (*_blossom_data)[blossoms[i]].next; |
1600 | 1600 |
Node base = _graph.source(matching); |
1601 | 1601 |
extractBlossom(blossoms[i], base, matching); |
1602 | 1602 |
} else { |
1603 | 1603 |
Node base = (*_blossom_data)[blossoms[i]].base; |
1604 | 1604 |
extractBlossom(blossoms[i], base, INVALID); |
1605 | 1605 |
} |
1606 | 1606 |
} |
1607 | 1607 |
} |
1608 | 1608 |
|
1609 | 1609 |
public: |
1610 | 1610 |
|
1611 | 1611 |
/// \brief Constructor |
1612 | 1612 |
/// |
1613 | 1613 |
/// Constructor. |
1614 | 1614 |
MaxWeightedMatching(const Graph& graph, const WeightMap& weight) |
1615 | 1615 |
: _graph(graph), _weight(weight), _matching(0), |
1616 | 1616 |
_node_potential(0), _blossom_potential(), _blossom_node_list(), |
1617 | 1617 |
_node_num(0), _blossom_num(0), |
1618 | 1618 |
|
1619 | 1619 |
_blossom_index(0), _blossom_set(0), _blossom_data(0), |
1620 | 1620 |
_node_index(0), _node_heap_index(0), _node_data(0), |
1621 | 1621 |
_tree_set_index(0), _tree_set(0), |
1622 | 1622 |
|
1623 | 1623 |
_delta1_index(0), _delta1(0), |
1624 | 1624 |
_delta2_index(0), _delta2(0), |
1625 | 1625 |
_delta3_index(0), _delta3(0), |
1626 | 1626 |
_delta4_index(0), _delta4(0), |
1627 | 1627 |
|
1628 | 1628 |
_delta_sum() {} |
1629 | 1629 |
|
1630 | 1630 |
~MaxWeightedMatching() { |
1631 | 1631 |
destroyStructures(); |
1632 | 1632 |
} |
1633 | 1633 |
|
1634 | 1634 |
/// \name Execution control |
1635 | 1635 |
/// The simplest way to execute the algorithm is to use the |
1636 | 1636 |
/// \c run() member function. |
1637 | 1637 |
|
1638 | 1638 |
///@{ |
1639 | 1639 |
|
1640 | 1640 |
/// \brief Initialize the algorithm |
1641 | 1641 |
/// |
1642 | 1642 |
/// Initialize the algorithm |
1643 | 1643 |
void init() { |
1644 | 1644 |
createStructures(); |
1645 | 1645 |
|
1646 | 1646 |
for (ArcIt e(_graph); e != INVALID; ++e) { |
1647 |
_node_heap_index->set(e, BinHeap<Value, IntArcMap>::PRE_HEAP); |
|
1647 |
(*_node_heap_index)[e] = BinHeap<Value, IntArcMap>::PRE_HEAP; |
|
1648 | 1648 |
} |
1649 | 1649 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
1650 |
_delta1_index->set(n, _delta1->PRE_HEAP); |
|
1650 |
(*_delta1_index)[n] = _delta1->PRE_HEAP; |
|
1651 | 1651 |
} |
1652 | 1652 |
for (EdgeIt e(_graph); e != INVALID; ++e) { |
1653 |
_delta3_index->set(e, _delta3->PRE_HEAP); |
|
1653 |
(*_delta3_index)[e] = _delta3->PRE_HEAP; |
|
1654 | 1654 |
} |
1655 | 1655 |
for (int i = 0; i < _blossom_num; ++i) { |
1656 |
_delta2_index->set(i, _delta2->PRE_HEAP); |
|
1657 |
_delta4_index->set(i, _delta4->PRE_HEAP); |
|
1656 |
(*_delta2_index)[i] = _delta2->PRE_HEAP; |
|
1657 |
(*_delta4_index)[i] = _delta4->PRE_HEAP; |
|
1658 | 1658 |
} |
1659 | 1659 |
|
1660 | 1660 |
int index = 0; |
1661 | 1661 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
1662 | 1662 |
Value max = 0; |
1663 | 1663 |
for (OutArcIt e(_graph, n); e != INVALID; ++e) { |
1664 | 1664 |
if (_graph.target(e) == n) continue; |
1665 | 1665 |
if ((dualScale * _weight[e]) / 2 > max) { |
1666 | 1666 |
max = (dualScale * _weight[e]) / 2; |
1667 | 1667 |
} |
1668 | 1668 |
} |
1669 |
_node_index->set(n, index); |
|
1669 |
(*_node_index)[n] = index; |
|
1670 | 1670 |
(*_node_data)[index].pot = max; |
1671 | 1671 |
_delta1->push(n, max); |
1672 | 1672 |
int blossom = |
1673 | 1673 |
_blossom_set->insert(n, std::numeric_limits<Value>::max()); |
1674 | 1674 |
|
1675 | 1675 |
_tree_set->insert(blossom); |
1676 | 1676 |
|
1677 | 1677 |
(*_blossom_data)[blossom].status = EVEN; |
1678 | 1678 |
(*_blossom_data)[blossom].pred = INVALID; |
1679 | 1679 |
(*_blossom_data)[blossom].next = INVALID; |
1680 | 1680 |
(*_blossom_data)[blossom].pot = 0; |
1681 | 1681 |
(*_blossom_data)[blossom].offset = 0; |
1682 | 1682 |
++index; |
1683 | 1683 |
} |
1684 | 1684 |
for (EdgeIt e(_graph); e != INVALID; ++e) { |
1685 | 1685 |
int si = (*_node_index)[_graph.u(e)]; |
1686 | 1686 |
int ti = (*_node_index)[_graph.v(e)]; |
1687 | 1687 |
if (_graph.u(e) != _graph.v(e)) { |
1688 | 1688 |
_delta3->push(e, ((*_node_data)[si].pot + (*_node_data)[ti].pot - |
1689 | 1689 |
dualScale * _weight[e]) / 2); |
1690 | 1690 |
} |
1691 | 1691 |
} |
1692 | 1692 |
} |
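For orientation only, a hedged usage sketch of the weighted solver; ListGraph and an int-valued EdgeMap are assumed stand-ins for the Graph and WeightMap template parameters.
lemon::ListGraph g;
lemon::ListGraph::Node u = g.addNode(), v = g.addNode();
lemon::ListGraph::Edge e = g.addEdge(u, v);
lemon::ListGraph::EdgeMap<int> weight(g);
weight[e] = 10;
lemon::MaxWeightedMatching<lemon::ListGraph, lemon::ListGraph::EdgeMap<int> >
  mwm(g, weight);
mwm.init();    // node potentials start at half the largest scaled incident weight
mwm.start();   // repeatedly applies the cheapest of the D1-D4 dual changes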
1693 | 1693 |
|
1694 | 1694 |
/// \brief Starts the algorithm |
1695 | 1695 |
/// |
1696 | 1696 |
/// Starts the algorithm |
1697 | 1697 |
void start() { |
1698 | 1698 |
enum OpType { |
1699 | 1699 |
D1, D2, D3, D4 |
1700 | 1700 |
}; |
1701 | 1701 |
|
1702 | 1702 |
int unmatched = _node_num; |
1703 | 1703 |
while (unmatched > 0) { |
1704 | 1704 |
Value d1 = !_delta1->empty() ? |
1705 | 1705 |
_delta1->prio() : std::numeric_limits<Value>::max(); |
1706 | 1706 |
|
1707 | 1707 |
Value d2 = !_delta2->empty() ? |
1708 | 1708 |
_delta2->prio() : std::numeric_limits<Value>::max(); |
1709 | 1709 |
|
1710 | 1710 |
Value d3 = !_delta3->empty() ? |
1711 | 1711 |
_delta3->prio() : std::numeric_limits<Value>::max(); |
1712 | 1712 |
|
1713 | 1713 |
Value d4 = !_delta4->empty() ? |
1714 | 1714 |
_delta4->prio() : std::numeric_limits<Value>::max(); |
1715 | 1715 |
|
1716 | 1716 |
_delta_sum = d1; OpType ot = D1; |
1717 | 1717 |
if (d2 < _delta_sum) { _delta_sum = d2; ot = D2; } |
1718 | 1718 |
if (d3 < _delta_sum) { _delta_sum = d3; ot = D3; } |
1719 | 1719 |
if (d4 < _delta_sum) { _delta_sum = d4; ot = D4; } |
1720 | 1720 |
|
1721 | 1721 |
|
1722 | 1722 |
switch (ot) { |
1723 | 1723 |
case D1: |
1724 | 1724 |
{ |
1725 | 1725 |
Node n = _delta1->top(); |
1726 | 1726 |
unmatchNode(n); |
1727 | 1727 |
--unmatched; |
1728 | 1728 |
} |
1729 | 1729 |
break; |
1730 | 1730 |
case D2: |
1731 | 1731 |
{ |
1732 | 1732 |
int blossom = _delta2->top(); |
1733 | 1733 |
Node n = _blossom_set->classTop(blossom); |
1734 | 1734 |
Arc e = (*_node_data)[(*_node_index)[n]].heap.top(); |
1735 | 1735 |
extendOnArc(e); |
1736 | 1736 |
} |
1737 | 1737 |
break; |
1738 | 1738 |
case D3: |
1739 | 1739 |
{ |
1740 | 1740 |
Edge e = _delta3->top(); |
1741 | 1741 |
|
1742 | 1742 |
int left_blossom = _blossom_set->find(_graph.u(e)); |
1743 | 1743 |
int right_blossom = _blossom_set->find(_graph.v(e)); |
1744 | 1744 |
|
1745 | 1745 |
if (left_blossom == right_blossom) { |
1746 | 1746 |
_delta3->pop(); |
1747 | 1747 |
} else { |
1748 | 1748 |
int left_tree; |
1749 | 1749 |
if ((*_blossom_data)[left_blossom].status == EVEN) { |
1750 | 1750 |
left_tree = _tree_set->find(left_blossom); |
1751 | 1751 |
} else { |
1752 | 1752 |
left_tree = -1; |
1753 | 1753 |
++unmatched; |
1754 | 1754 |
} |
1755 | 1755 |
int right_tree; |
1756 | 1756 |
if ((*_blossom_data)[right_blossom].status == EVEN) { |
1757 | 1757 |
right_tree = _tree_set->find(right_blossom); |
1758 | 1758 |
} else { |
1759 | 1759 |
right_tree = -1; |
1760 | 1760 |
++unmatched; |
1761 | 1761 |
} |
1762 | 1762 |
|
1763 | 1763 |
if (left_tree == right_tree) { |
1764 | 1764 |
shrinkOnEdge(e, left_tree); |
1765 | 1765 |
} else { |
... | ... |
@@ -2648,302 +2648,302 @@ |
2648 | 2648 |
for (int i = 0; i < int(subblossoms.size()); ++i) { |
2649 | 2649 |
if (subblossoms[i] == b) ib = i; |
2650 | 2650 |
if (subblossoms[i] == d) id = i; |
2651 | 2651 |
|
2652 | 2652 |
(*_blossom_data)[subblossoms[i]].offset = offset; |
2653 | 2653 |
if (!_blossom_set->trivial(subblossoms[i])) { |
2654 | 2654 |
(*_blossom_data)[subblossoms[i]].pot -= 2 * offset; |
2655 | 2655 |
} |
2656 | 2656 |
if (_blossom_set->classPrio(subblossoms[i]) != |
2657 | 2657 |
std::numeric_limits<Value>::max()) { |
2658 | 2658 |
_delta2->push(subblossoms[i], |
2659 | 2659 |
_blossom_set->classPrio(subblossoms[i]) - |
2660 | 2660 |
(*_blossom_data)[subblossoms[i]].offset); |
2661 | 2661 |
} |
2662 | 2662 |
} |
2663 | 2663 |
|
2664 | 2664 |
if (id > ib ? ((id - ib) % 2 == 0) : ((ib - id) % 2 == 1)) { |
2665 | 2665 |
for (int i = (id + 1) % subblossoms.size(); |
2666 | 2666 |
i != ib; i = (i + 2) % subblossoms.size()) { |
2667 | 2667 |
int sb = subblossoms[i]; |
2668 | 2668 |
int tb = subblossoms[(i + 1) % subblossoms.size()]; |
2669 | 2669 |
(*_blossom_data)[sb].next = |
2670 | 2670 |
_graph.oppositeArc((*_blossom_data)[tb].next); |
2671 | 2671 |
} |
2672 | 2672 |
|
2673 | 2673 |
for (int i = ib; i != id; i = (i + 2) % subblossoms.size()) { |
2674 | 2674 |
int sb = subblossoms[i]; |
2675 | 2675 |
int tb = subblossoms[(i + 1) % subblossoms.size()]; |
2676 | 2676 |
int ub = subblossoms[(i + 2) % subblossoms.size()]; |
2677 | 2677 |
|
2678 | 2678 |
(*_blossom_data)[sb].status = ODD; |
2679 | 2679 |
matchedToOdd(sb); |
2680 | 2680 |
_tree_set->insert(sb, tree); |
2681 | 2681 |
(*_blossom_data)[sb].pred = pred; |
2682 | 2682 |
(*_blossom_data)[sb].next = |
2683 | 2683 |
_graph.oppositeArc((*_blossom_data)[tb].next); |
2684 | 2684 |
|
2685 | 2685 |
pred = (*_blossom_data)[ub].next; |
2686 | 2686 |
|
2687 | 2687 |
(*_blossom_data)[tb].status = EVEN; |
2688 | 2688 |
matchedToEven(tb, tree); |
2689 | 2689 |
_tree_set->insert(tb, tree); |
2690 | 2690 |
(*_blossom_data)[tb].pred = (*_blossom_data)[tb].next; |
2691 | 2691 |
} |
2692 | 2692 |
|
2693 | 2693 |
(*_blossom_data)[subblossoms[id]].status = ODD; |
2694 | 2694 |
matchedToOdd(subblossoms[id]); |
2695 | 2695 |
_tree_set->insert(subblossoms[id], tree); |
2696 | 2696 |
(*_blossom_data)[subblossoms[id]].next = next; |
2697 | 2697 |
(*_blossom_data)[subblossoms[id]].pred = pred; |
2698 | 2698 |
|
2699 | 2699 |
} else { |
2700 | 2700 |
|
2701 | 2701 |
for (int i = (ib + 1) % subblossoms.size(); |
2702 | 2702 |
i != id; i = (i + 2) % subblossoms.size()) { |
2703 | 2703 |
int sb = subblossoms[i]; |
2704 | 2704 |
int tb = subblossoms[(i + 1) % subblossoms.size()]; |
2705 | 2705 |
(*_blossom_data)[sb].next = |
2706 | 2706 |
_graph.oppositeArc((*_blossom_data)[tb].next); |
2707 | 2707 |
} |
2708 | 2708 |
|
2709 | 2709 |
for (int i = id; i != ib; i = (i + 2) % subblossoms.size()) { |
2710 | 2710 |
int sb = subblossoms[i]; |
2711 | 2711 |
int tb = subblossoms[(i + 1) % subblossoms.size()]; |
2712 | 2712 |
int ub = subblossoms[(i + 2) % subblossoms.size()]; |
2713 | 2713 |
|
2714 | 2714 |
(*_blossom_data)[sb].status = ODD; |
2715 | 2715 |
matchedToOdd(sb); |
2716 | 2716 |
_tree_set->insert(sb, tree); |
2717 | 2717 |
(*_blossom_data)[sb].next = next; |
2718 | 2718 |
(*_blossom_data)[sb].pred = |
2719 | 2719 |
_graph.oppositeArc((*_blossom_data)[tb].next); |
2720 | 2720 |
|
2721 | 2721 |
(*_blossom_data)[tb].status = EVEN; |
2722 | 2722 |
matchedToEven(tb, tree); |
2723 | 2723 |
_tree_set->insert(tb, tree); |
2724 | 2724 |
(*_blossom_data)[tb].pred = |
2725 | 2725 |
(*_blossom_data)[tb].next = |
2726 | 2726 |
_graph.oppositeArc((*_blossom_data)[ub].next); |
2727 | 2727 |
next = (*_blossom_data)[ub].next; |
2728 | 2728 |
} |
2729 | 2729 |
|
2730 | 2730 |
(*_blossom_data)[subblossoms[ib]].status = ODD; |
2731 | 2731 |
matchedToOdd(subblossoms[ib]); |
2732 | 2732 |
_tree_set->insert(subblossoms[ib], tree); |
2733 | 2733 |
(*_blossom_data)[subblossoms[ib]].next = next; |
2734 | 2734 |
(*_blossom_data)[subblossoms[ib]].pred = pred; |
2735 | 2735 |
} |
2736 | 2736 |
_tree_set->erase(blossom); |
2737 | 2737 |
} |
2738 | 2738 |
|
2739 | 2739 |
void extractBlossom(int blossom, const Node& base, const Arc& matching) { |
2740 | 2740 |
if (_blossom_set->trivial(blossom)) { |
2741 | 2741 |
int bi = (*_node_index)[base]; |
2742 | 2742 |
Value pot = (*_node_data)[bi].pot; |
2743 | 2743 |
|
2744 |
_matching->set(base, matching); |
|
2744 |
(*_matching)[base] = matching; |
|
2745 | 2745 |
_blossom_node_list.push_back(base); |
2746 |
_node_potential->set(base, pot); |
|
2746 |
(*_node_potential)[base] = pot; |
|
2747 | 2747 |
} else { |
2748 | 2748 |
|
2749 | 2749 |
Value pot = (*_blossom_data)[blossom].pot; |
2750 | 2750 |
int bn = _blossom_node_list.size(); |
2751 | 2751 |
|
2752 | 2752 |
std::vector<int> subblossoms; |
2753 | 2753 |
_blossom_set->split(blossom, std::back_inserter(subblossoms)); |
2754 | 2754 |
int b = _blossom_set->find(base); |
2755 | 2755 |
int ib = -1; |
2756 | 2756 |
for (int i = 0; i < int(subblossoms.size()); ++i) { |
2757 | 2757 |
if (subblossoms[i] == b) { ib = i; break; } |
2758 | 2758 |
} |
2759 | 2759 |
|
2760 | 2760 |
for (int i = 1; i < int(subblossoms.size()); i += 2) { |
2761 | 2761 |
int sb = subblossoms[(ib + i) % subblossoms.size()]; |
2762 | 2762 |
int tb = subblossoms[(ib + i + 1) % subblossoms.size()]; |
2763 | 2763 |
|
2764 | 2764 |
Arc m = (*_blossom_data)[tb].next; |
2765 | 2765 |
extractBlossom(sb, _graph.target(m), _graph.oppositeArc(m)); |
2766 | 2766 |
extractBlossom(tb, _graph.source(m), m); |
2767 | 2767 |
} |
2768 | 2768 |
extractBlossom(subblossoms[ib], base, matching); |
2769 | 2769 |
|
2770 | 2770 |
int en = _blossom_node_list.size(); |
2771 | 2771 |
|
2772 | 2772 |
_blossom_potential.push_back(BlossomVariable(bn, en, pot)); |
2773 | 2773 |
} |
2774 | 2774 |
} |
2775 | 2775 |
|
2776 | 2776 |
void extractMatching() { |
2777 | 2777 |
std::vector<int> blossoms; |
2778 | 2778 |
for (typename BlossomSet::ClassIt c(*_blossom_set); c != INVALID; ++c) { |
2779 | 2779 |
blossoms.push_back(c); |
2780 | 2780 |
} |
2781 | 2781 |
|
2782 | 2782 |
for (int i = 0; i < int(blossoms.size()); ++i) { |
2783 | 2783 |
|
2784 | 2784 |
Value offset = (*_blossom_data)[blossoms[i]].offset; |
2785 | 2785 |
(*_blossom_data)[blossoms[i]].pot += 2 * offset; |
2786 | 2786 |
for (typename BlossomSet::ItemIt n(*_blossom_set, blossoms[i]); |
2787 | 2787 |
n != INVALID; ++n) { |
2788 | 2788 |
(*_node_data)[(*_node_index)[n]].pot -= offset; |
2789 | 2789 |
} |
2790 | 2790 |
|
2791 | 2791 |
Arc matching = (*_blossom_data)[blossoms[i]].next; |
2792 | 2792 |
Node base = _graph.source(matching); |
2793 | 2793 |
extractBlossom(blossoms[i], base, matching); |
2794 | 2794 |
} |
2795 | 2795 |
} |
2796 | 2796 |
|
2797 | 2797 |
public: |
2798 | 2798 |
|
2799 | 2799 |
/// \brief Constructor |
2800 | 2800 |
/// |
2801 | 2801 |
/// Constructor. |
2802 | 2802 |
MaxWeightedPerfectMatching(const Graph& graph, const WeightMap& weight) |
2803 | 2803 |
: _graph(graph), _weight(weight), _matching(0), |
2804 | 2804 |
_node_potential(0), _blossom_potential(), _blossom_node_list(), |
2805 | 2805 |
_node_num(0), _blossom_num(0), |
2806 | 2806 |
|
2807 | 2807 |
_blossom_index(0), _blossom_set(0), _blossom_data(0), |
2808 | 2808 |
_node_index(0), _node_heap_index(0), _node_data(0), |
2809 | 2809 |
_tree_set_index(0), _tree_set(0), |
2810 | 2810 |
|
2811 | 2811 |
_delta2_index(0), _delta2(0), |
2812 | 2812 |
_delta3_index(0), _delta3(0), |
2813 | 2813 |
_delta4_index(0), _delta4(0), |
2814 | 2814 |
|
2815 | 2815 |
_delta_sum() {} |
2816 | 2816 |
|
2817 | 2817 |
~MaxWeightedPerfectMatching() { |
2818 | 2818 |
destroyStructures(); |
2819 | 2819 |
} |
2820 | 2820 |
|
2821 | 2821 |
/// \name Execution control |
2822 | 2822 |
/// The simplest way to execute the algorithm is to use the |
2823 | 2823 |
/// \c run() member function. |
2824 | 2824 |
|
2825 | 2825 |
///@{ |
2826 | 2826 |
|
2827 | 2827 |
/// \brief Initialize the algorithm |
2828 | 2828 |
/// |
2829 | 2829 |
/// Initialize the algorithm |
2830 | 2830 |
void init() { |
2831 | 2831 |
createStructures(); |
2832 | 2832 |
|
2833 | 2833 |
for (ArcIt e(_graph); e != INVALID; ++e) { |
2834 |
_node_heap_index->set(e, BinHeap<Value, IntArcMap>::PRE_HEAP); |
|
2834 |
(*_node_heap_index)[e] = BinHeap<Value, IntArcMap>::PRE_HEAP; |
|
2835 | 2835 |
} |
2836 | 2836 |
for (EdgeIt e(_graph); e != INVALID; ++e) { |
2837 |
_delta3_index->set(e, _delta3->PRE_HEAP); |
|
2837 |
(*_delta3_index)[e] = _delta3->PRE_HEAP; |
|
2838 | 2838 |
} |
2839 | 2839 |
for (int i = 0; i < _blossom_num; ++i) { |
2840 |
_delta2_index->set(i, _delta2->PRE_HEAP); |
|
2841 |
_delta4_index->set(i, _delta4->PRE_HEAP); |
|
2840 |
(*_delta2_index)[i] = _delta2->PRE_HEAP; |
|
2841 |
(*_delta4_index)[i] = _delta4->PRE_HEAP; |
|
2842 | 2842 |
} |
2843 | 2843 |
|
2844 | 2844 |
int index = 0; |
2845 | 2845 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
2846 | 2846 |
Value max = - std::numeric_limits<Value>::max(); |
2847 | 2847 |
for (OutArcIt e(_graph, n); e != INVALID; ++e) { |
2848 | 2848 |
if (_graph.target(e) == n) continue; |
2849 | 2849 |
if ((dualScale * _weight[e]) / 2 > max) { |
2850 | 2850 |
max = (dualScale * _weight[e]) / 2; |
2851 | 2851 |
} |
2852 | 2852 |
} |
2853 |
_node_index->set(n, index); |
|
2853 |
(*_node_index)[n] = index; |
|
2854 | 2854 |
(*_node_data)[index].pot = max; |
2855 | 2855 |
int blossom = |
2856 | 2856 |
_blossom_set->insert(n, std::numeric_limits<Value>::max()); |
2857 | 2857 |
|
2858 | 2858 |
_tree_set->insert(blossom); |
2859 | 2859 |
|
2860 | 2860 |
(*_blossom_data)[blossom].status = EVEN; |
2861 | 2861 |
(*_blossom_data)[blossom].pred = INVALID; |
2862 | 2862 |
(*_blossom_data)[blossom].next = INVALID; |
2863 | 2863 |
(*_blossom_data)[blossom].pot = 0; |
2864 | 2864 |
(*_blossom_data)[blossom].offset = 0; |
2865 | 2865 |
++index; |
2866 | 2866 |
} |
2867 | 2867 |
for (EdgeIt e(_graph); e != INVALID; ++e) { |
2868 | 2868 |
int si = (*_node_index)[_graph.u(e)]; |
2869 | 2869 |
int ti = (*_node_index)[_graph.v(e)]; |
2870 | 2870 |
if (_graph.u(e) != _graph.v(e)) { |
2871 | 2871 |
_delta3->push(e, ((*_node_data)[si].pot + (*_node_data)[ti].pot - |
2872 | 2872 |
dualScale * _weight[e]) / 2); |
2873 | 2873 |
} |
2874 | 2874 |
} |
2875 | 2875 |
} |
2876 | 2876 |
|
2877 | 2877 |
/// \brief Starts the algorithm |
2878 | 2878 |
/// |
2879 | 2879 |
/// Starts the algorithm |
2880 | 2880 |
bool start() { |
2881 | 2881 |
enum OpType { |
2882 | 2882 |
D2, D3, D4 |
2883 | 2883 |
}; |
2884 | 2884 |
|
2885 | 2885 |
int unmatched = _node_num; |
2886 | 2886 |
while (unmatched > 0) { |
2887 | 2887 |
Value d2 = !_delta2->empty() ? |
2888 | 2888 |
_delta2->prio() : std::numeric_limits<Value>::max(); |
2889 | 2889 |
|
2890 | 2890 |
Value d3 = !_delta3->empty() ? |
2891 | 2891 |
_delta3->prio() : std::numeric_limits<Value>::max(); |
2892 | 2892 |
|
2893 | 2893 |
Value d4 = !_delta4->empty() ? |
2894 | 2894 |
_delta4->prio() : std::numeric_limits<Value>::max(); |
2895 | 2895 |
|
2896 | 2896 |
_delta_sum = d2; OpType ot = D2; |
2897 | 2897 |
if (d3 < _delta_sum) { _delta_sum = d3; ot = D3; } |
2898 | 2898 |
if (d4 < _delta_sum) { _delta_sum = d4; ot = D4; } |
2899 | 2899 |
|
2900 | 2900 |
if (_delta_sum == std::numeric_limits<Value>::max()) { |
2901 | 2901 |
return false; |
2902 | 2902 |
} |
2903 | 2903 |
|
2904 | 2904 |
switch (ot) { |
2905 | 2905 |
case D2: |
2906 | 2906 |
{ |
2907 | 2907 |
int blossom = _delta2->top(); |
2908 | 2908 |
Node n = _blossom_set->classTop(blossom); |
2909 | 2909 |
Arc e = (*_node_data)[(*_node_index)[n]].heap.top(); |
2910 | 2910 |
extendOnArc(e); |
2911 | 2911 |
} |
2912 | 2912 |
break; |
2913 | 2913 |
case D3: |
2914 | 2914 |
{ |
2915 | 2915 |
Edge e = _delta3->top(); |
2916 | 2916 |
|
2917 | 2917 |
int left_blossom = _blossom_set->find(_graph.u(e)); |
2918 | 2918 |
int right_blossom = _blossom_set->find(_graph.v(e)); |
2919 | 2919 |
|
2920 | 2920 |
if (left_blossom == right_blossom) { |
2921 | 2921 |
_delta3->pop(); |
2922 | 2922 |
} else { |
2923 | 2923 |
int left_tree = _tree_set->find(left_blossom); |
2924 | 2924 |
int right_tree = _tree_set->find(right_blossom); |
2925 | 2925 |
|
2926 | 2926 |
if (left_tree == right_tree) { |
2927 | 2927 |
shrinkOnEdge(e, left_tree); |
2928 | 2928 |
} else { |
2929 | 2929 |
augmentOnEdge(e); |
2930 | 2930 |
unmatched -= 2; |
2931 | 2931 |
} |
2932 | 2932 |
} |
2933 | 2933 |
} break; |
2934 | 2934 |
case D4: |
2935 | 2935 |
splitBlossom(_delta4->top()); |
2936 | 2936 |
break; |
2937 | 2937 |
} |
2938 | 2938 |
} |
2939 | 2939 |
extractMatching(); |
2940 | 2940 |
return true; |
2941 | 2941 |
} |
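A hedged sketch of driving the perfect-matching variant by hand, reusing the g and weight of the previous sketch; the bool returned by start() is false exactly when every remaining delta is at its numeric maximum, i.e. no perfect matching exists.
lemon::MaxWeightedPerfectMatching<lemon::ListGraph,
                                  lemon::ListGraph::EdgeMap<int> > mwpm(g, weight);
mwpm.init();
if (!mwpm.start()) {
  // the graph has no perfect matching
}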
2942 | 2942 |
|
2943 | 2943 |
/// \brief Runs %MaxWeightedPerfectMatching algorithm. |
2944 | 2944 |
/// |
2945 | 2945 |
/// This method runs the %MaxWeightedPerfectMatching algorithm. |
2946 | 2946 |
/// |
2947 | 2947 |
/// \note mwm.run() is just a shortcut of the following code. |
2948 | 2948 |
/// \code |
2949 | 2949 |
/// mwm.init(); |
... | ... |
@@ -200,264 +200,264 @@ |
200 | 200 |
typedef std::vector<DualVariable> DualVariables; |
201 | 201 |
|
202 | 202 |
DualVariables _dual_variables; |
203 | 203 |
|
204 | 204 |
typedef typename Digraph::template NodeMap<int> HeapCrossRef; |
205 | 205 |
|
206 | 206 |
HeapCrossRef *_heap_cross_ref; |
207 | 207 |
|
208 | 208 |
typedef BinHeap<int, HeapCrossRef> Heap; |
209 | 209 |
|
210 | 210 |
Heap *_heap; |
211 | 211 |
|
212 | 212 |
protected: |
213 | 213 |
|
214 | 214 |
MinCostArborescence() {} |
215 | 215 |
|
216 | 216 |
private: |
217 | 217 |
|
218 | 218 |
void createStructures() { |
219 | 219 |
if (!_pred) { |
220 | 220 |
local_pred = true; |
221 | 221 |
_pred = Traits::createPredMap(*_digraph); |
222 | 222 |
} |
223 | 223 |
if (!_arborescence) { |
224 | 224 |
local_arborescence = true; |
225 | 225 |
_arborescence = Traits::createArborescenceMap(*_digraph); |
226 | 226 |
} |
227 | 227 |
if (!_arc_order) { |
228 | 228 |
_arc_order = new ArcOrder(*_digraph); |
229 | 229 |
} |
230 | 230 |
if (!_node_order) { |
231 | 231 |
_node_order = new NodeOrder(*_digraph); |
232 | 232 |
} |
233 | 233 |
if (!_cost_arcs) { |
234 | 234 |
_cost_arcs = new CostArcMap(*_digraph); |
235 | 235 |
} |
236 | 236 |
if (!_heap_cross_ref) { |
237 | 237 |
_heap_cross_ref = new HeapCrossRef(*_digraph, -1); |
238 | 238 |
} |
239 | 239 |
if (!_heap) { |
240 | 240 |
_heap = new Heap(*_heap_cross_ref); |
241 | 241 |
} |
242 | 242 |
} |
243 | 243 |
|
244 | 244 |
void destroyStructures() { |
245 | 245 |
if (local_arborescence) { |
246 | 246 |
delete _arborescence; |
247 | 247 |
} |
248 | 248 |
if (local_pred) { |
249 | 249 |
delete _pred; |
250 | 250 |
} |
251 | 251 |
if (_arc_order) { |
252 | 252 |
delete _arc_order; |
253 | 253 |
} |
254 | 254 |
if (_node_order) { |
255 | 255 |
delete _node_order; |
256 | 256 |
} |
257 | 257 |
if (_cost_arcs) { |
258 | 258 |
delete _cost_arcs; |
259 | 259 |
} |
260 | 260 |
if (_heap) { |
261 | 261 |
delete _heap; |
262 | 262 |
} |
263 | 263 |
if (_heap_cross_ref) { |
264 | 264 |
delete _heap_cross_ref; |
265 | 265 |
} |
266 | 266 |
} |
267 | 267 |
|
268 | 268 |
Arc prepare(Node node) { |
269 | 269 |
std::vector<Node> nodes; |
270 | 270 |
(*_node_order)[node] = _dual_node_list.size(); |
271 | 271 |
StackLevel level; |
272 | 272 |
level.node_level = _dual_node_list.size(); |
273 | 273 |
_dual_node_list.push_back(node); |
274 | 274 |
for (InArcIt it(*_digraph, node); it != INVALID; ++it) { |
275 | 275 |
Arc arc = it; |
276 | 276 |
Node source = _digraph->source(arc); |
277 | 277 |
Value value = (*_cost)[it]; |
278 | 278 |
if (source == node || (*_node_order)[source] == -3) continue; |
279 | 279 |
if ((*_cost_arcs)[source].arc == INVALID) { |
280 | 280 |
(*_cost_arcs)[source].arc = arc; |
281 | 281 |
(*_cost_arcs)[source].value = value; |
282 | 282 |
nodes.push_back(source); |
283 | 283 |
} else { |
284 | 284 |
if ((*_cost_arcs)[source].value > value) { |
285 | 285 |
(*_cost_arcs)[source].arc = arc; |
286 | 286 |
(*_cost_arcs)[source].value = value; |
287 | 287 |
} |
288 | 288 |
} |
289 | 289 |
} |
290 | 290 |
CostArc minimum = (*_cost_arcs)[nodes[0]]; |
291 | 291 |
for (int i = 1; i < int(nodes.size()); ++i) { |
292 | 292 |
if ((*_cost_arcs)[nodes[i]].value < minimum.value) { |
293 | 293 |
minimum = (*_cost_arcs)[nodes[i]]; |
294 | 294 |
} |
295 | 295 |
} |
296 |
_arc_order->set(minimum.arc, _dual_variables.size()); |
|
296 |
(*_arc_order)[minimum.arc] = _dual_variables.size(); |
|
297 | 297 |
DualVariable var(_dual_node_list.size() - 1, |
298 | 298 |
_dual_node_list.size(), minimum.value); |
299 | 299 |
_dual_variables.push_back(var); |
300 | 300 |
for (int i = 0; i < int(nodes.size()); ++i) { |
301 | 301 |
(*_cost_arcs)[nodes[i]].value -= minimum.value; |
302 | 302 |
level.arcs.push_back((*_cost_arcs)[nodes[i]]); |
303 | 303 |
(*_cost_arcs)[nodes[i]].arc = INVALID; |
304 | 304 |
} |
305 | 305 |
level_stack.push_back(level); |
306 | 306 |
return minimum.arc; |
307 | 307 |
} |
308 | 308 |
|
309 | 309 |
Arc contract(Node node) { |
310 | 310 |
int node_bottom = bottom(node); |
311 | 311 |
std::vector<Node> nodes; |
312 | 312 |
while (!level_stack.empty() && |
313 | 313 |
level_stack.back().node_level >= node_bottom) { |
314 | 314 |
for (int i = 0; i < int(level_stack.back().arcs.size()); ++i) { |
315 | 315 |
Arc arc = level_stack.back().arcs[i].arc; |
316 | 316 |
Node source = _digraph->source(arc); |
317 | 317 |
Value value = level_stack.back().arcs[i].value; |
318 | 318 |
if ((*_node_order)[source] >= node_bottom) continue; |
319 | 319 |
if ((*_cost_arcs)[source].arc == INVALID) { |
320 | 320 |
(*_cost_arcs)[source].arc = arc; |
321 | 321 |
(*_cost_arcs)[source].value = value; |
322 | 322 |
nodes.push_back(source); |
323 | 323 |
} else { |
324 | 324 |
if ((*_cost_arcs)[source].value > value) { |
325 | 325 |
(*_cost_arcs)[source].arc = arc; |
326 | 326 |
(*_cost_arcs)[source].value = value; |
327 | 327 |
} |
328 | 328 |
} |
329 | 329 |
} |
330 | 330 |
level_stack.pop_back(); |
331 | 331 |
} |
332 | 332 |
CostArc minimum = (*_cost_arcs)[nodes[0]]; |
333 | 333 |
for (int i = 1; i < int(nodes.size()); ++i) { |
334 | 334 |
if ((*_cost_arcs)[nodes[i]].value < minimum.value) { |
335 | 335 |
minimum = (*_cost_arcs)[nodes[i]]; |
336 | 336 |
} |
337 | 337 |
} |
338 |
_arc_order->set(minimum.arc, _dual_variables.size()); |
|
338 |
(*_arc_order)[minimum.arc] = _dual_variables.size(); |
|
339 | 339 |
DualVariable var(node_bottom, _dual_node_list.size(), minimum.value); |
340 | 340 |
_dual_variables.push_back(var); |
341 | 341 |
StackLevel level; |
342 | 342 |
level.node_level = node_bottom; |
343 | 343 |
for (int i = 0; i < int(nodes.size()); ++i) { |
344 | 344 |
(*_cost_arcs)[nodes[i]].value -= minimum.value; |
345 | 345 |
level.arcs.push_back((*_cost_arcs)[nodes[i]]); |
346 | 346 |
(*_cost_arcs)[nodes[i]].arc = INVALID; |
347 | 347 |
} |
348 | 348 |
level_stack.push_back(level); |
349 | 349 |
return minimum.arc; |
350 | 350 |
} |
351 | 351 |
|
352 | 352 |
int bottom(Node node) { |
353 | 353 |
int k = level_stack.size() - 1; |
354 | 354 |
while (level_stack[k].node_level > (*_node_order)[node]) { |
355 | 355 |
--k; |
356 | 356 |
} |
357 | 357 |
return level_stack[k].node_level; |
358 | 358 |
} |
359 | 359 |
|
360 | 360 |
void finalize(Arc arc) { |
361 | 361 |
Node node = _digraph->target(arc); |
362 | 362 |
_heap->push(node, (*_arc_order)[arc]); |
363 | 363 |
_pred->set(node, arc); |
364 | 364 |
while (!_heap->empty()) { |
365 | 365 |
Node source = _heap->top(); |
366 | 366 |
_heap->pop(); |
367 |
_node_order->set(source, -1); |
|
367 |
(*_node_order)[source] = -1; |
|
368 | 368 |
for (OutArcIt it(*_digraph, source); it != INVALID; ++it) { |
369 | 369 |
if ((*_arc_order)[it] < 0) continue; |
370 | 370 |
Node target = _digraph->target(it); |
371 | 371 |
switch(_heap->state(target)) { |
372 | 372 |
case Heap::PRE_HEAP: |
373 | 373 |
_heap->push(target, (*_arc_order)[it]); |
374 | 374 |
_pred->set(target, it); |
375 | 375 |
break; |
376 | 376 |
case Heap::IN_HEAP: |
377 | 377 |
if ((*_arc_order)[it] < (*_heap)[target]) { |
378 | 378 |
_heap->decrease(target, (*_arc_order)[it]); |
379 | 379 |
_pred->set(target, it); |
380 | 380 |
} |
381 | 381 |
break; |
382 | 382 |
case Heap::POST_HEAP: |
383 | 383 |
break; |
384 | 384 |
} |
385 | 385 |
} |
386 | 386 |
_arborescence->set((*_pred)[source], true); |
387 | 387 |
} |
388 | 388 |
} |
389 | 389 |
|
390 | 390 |
|
391 | 391 |
public: |
392 | 392 |
|
393 | 393 |
/// \name Named template parameters |
394 | 394 |
|
395 | 395 |
/// @{ |
396 | 396 |
|
397 | 397 |
template <class T> |
398 | 398 |
struct DefArborescenceMapTraits : public Traits { |
399 | 399 |
typedef T ArborescenceMap; |
400 | 400 |
static ArborescenceMap *createArborescenceMap(const Digraph &) |
401 | 401 |
{ |
402 | 402 |
LEMON_ASSERT(false, "ArborescenceMap is not initialized"); |
403 | 403 |
return 0; // ignore warnings |
404 | 404 |
} |
405 | 405 |
}; |
406 | 406 |
|
407 | 407 |
/// \brief \ref named-templ-param "Named parameter" for |
408 | 408 |
/// setting ArborescenceMap type |
409 | 409 |
/// |
410 | 410 |
/// \ref named-templ-param "Named parameter" for setting |
411 | 411 |
/// ArborescenceMap type |
412 | 412 |
template <class T> |
413 | 413 |
struct DefArborescenceMap |
414 | 414 |
: public MinCostArborescence<Digraph, CostMap, |
415 | 415 |
DefArborescenceMapTraits<T> > { |
416 | 416 |
}; |
417 | 417 |
|
418 | 418 |
template <class T> |
419 | 419 |
struct DefPredMapTraits : public Traits { |
420 | 420 |
typedef T PredMap; |
421 | 421 |
static PredMap *createPredMap(const Digraph &) |
422 | 422 |
{ |
423 | 423 |
LEMON_ASSERT(false, "PredMap is not initialized"); |
424 | 424 |
} |
425 | 425 |
}; |
426 | 426 |
|
427 | 427 |
/// \brief \ref named-templ-param "Named parameter" for |
428 | 428 |
/// setting PredMap type |
429 | 429 |
/// |
430 | 430 |
/// \ref named-templ-param "Named parameter" for setting |
431 | 431 |
/// PredMap type |
432 | 432 |
template <class T> |
433 | 433 |
struct DefPredMap |
434 | 434 |
: public MinCostArborescence<Digraph, CostMap, DefPredMapTraits<T> > { |
435 | 435 |
}; |
436 | 436 |
|
437 | 437 |
/// @} |
438 | 438 |
|
439 | 439 |
/// \brief Constructor. |
440 | 440 |
/// |
441 | 441 |
/// \param digraph The digraph the algorithm will run on. |
442 | 442 |
/// \param cost The cost map used by the algorithm. |
443 | 443 |
MinCostArborescence(const Digraph& digraph, const CostMap& cost) |
444 | 444 |
: _digraph(&digraph), _cost(&cost), _pred(0), local_pred(false), |
445 | 445 |
_arborescence(0), local_arborescence(false), |
446 | 446 |
_arc_order(0), _node_order(0), _cost_arcs(0), |
447 | 447 |
_heap_cross_ref(0), _heap(0) {} |
448 | 448 |
|
449 | 449 |
/// \brief Destructor. |
450 | 450 |
~MinCostArborescence() { |
451 | 451 |
destroyStructures(); |
452 | 452 |
} |
453 | 453 |
|
454 | 454 |
/// \brief Sets the arborescence map. |
455 | 455 |
/// |
456 | 456 |
/// Sets the arborescence map. |
457 | 457 |
/// \return <tt>(*this)</tt> |
458 | 458 |
MinCostArborescence& arborescenceMap(ArborescenceMap& m) { |
459 | 459 |
if (local_arborescence) { |
460 | 460 |
delete _arborescence; |
461 | 461 |
} |
462 | 462 |
local_arborescence = false; |
463 | 463 |
_arborescence = &m; |
... | ... |
@@ -557,199 +557,199 @@ |
557 | 557 |
Value dualValue() const { |
558 | 558 |
Value sum = 0; |
559 | 559 |
for (int i = 0; i < int(_dual_variables.size()); ++i) { |
560 | 560 |
sum += _dual_variables[i].value; |
561 | 561 |
} |
562 | 562 |
return sum; |
563 | 563 |
} |
564 | 564 |
|
565 | 565 |
/// \brief Returns the number of the nodes in the dual variable. |
566 | 566 |
/// |
567 | 567 |
/// Returns the number of the nodes in the dual variable. |
568 | 568 |
int dualSize(int k) const { |
569 | 569 |
return _dual_variables[k].end - _dual_variables[k].begin; |
570 | 570 |
} |
571 | 571 |
|
572 | 572 |
/// \brief Returns the value of the dual variable. |
573 | 573 |
/// |
574 | 574 |
/// Returns the value of the dual variable. |
575 | 575 |
const Value& dualValue(int k) const { |
576 | 576 |
return _dual_variables[k].value; |
577 | 577 |
} |
578 | 578 |
|
579 | 579 |
/// \brief LEMON iterator for getting a dual variable. |
580 | 580 |
/// |
581 | 581 |
/// LEMON iterator for getting a dual variable. This class provides |
582 | 582 |
/// a common style lemon iterator which gives back a subset of |
583 | 583 |
/// the nodes. |
584 | 584 |
class DualIt { |
585 | 585 |
public: |
586 | 586 |
|
587 | 587 |
/// \brief Constructor. |
588 | 588 |
/// |
589 | 589 |
/// Constructor for getting the node set of the variable. |
590 | 590 |
DualIt(const MinCostArborescence& algorithm, int variable) |
591 | 591 |
: _algorithm(&algorithm) |
592 | 592 |
{ |
593 | 593 |
_index = _algorithm->_dual_variables[variable].begin; |
594 | 594 |
_last = _algorithm->_dual_variables[variable].end; |
595 | 595 |
} |
596 | 596 |
|
597 | 597 |
/// \brief Conversion to node. |
598 | 598 |
/// |
599 | 599 |
/// Conversion to node. |
600 | 600 |
operator Node() const { |
601 | 601 |
return _algorithm->_dual_node_list[_index]; |
602 | 602 |
} |
603 | 603 |
|
604 | 604 |
/// \brief Increment operator. |
605 | 605 |
/// |
606 | 606 |
/// Increment operator. |
607 | 607 |
DualIt& operator++() { |
608 | 608 |
++_index; |
609 | 609 |
return *this; |
610 | 610 |
} |
611 | 611 |
|
612 | 612 |
/// \brief Validity checking |
613 | 613 |
/// |
614 | 614 |
/// Checks whether the iterator is invalid. |
615 | 615 |
bool operator==(Invalid) const { |
616 | 616 |
return _index == _last; |
617 | 617 |
} |
618 | 618 |
|
619 | 619 |
/// \brief Validity checking |
620 | 620 |
/// |
621 | 621 |
/// Checks whether the iterator is valid. |
622 | 622 |
bool operator!=(Invalid) const { |
623 | 623 |
return _index != _last; |
624 | 624 |
} |
625 | 625 |
|
626 | 626 |
private: |
627 | 627 |
const MinCostArborescence* _algorithm; |
628 | 628 |
int _index, _last; |
629 | 629 |
}; |
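A sketch of walking one dual variable; mca is an assumed, already-executed MinCostArborescence instance over LEMON's ListDigraph with an int ArcMap as cost map, and k some valid dual-variable index (all assumptions, not part of the lines above).
typedef lemon::MinCostArborescence<lemon::ListDigraph,
                                   lemon::ListDigraph::ArcMap<int> > MCA;
int k = 0;
int nodes_in_k = mca.dualSize(k);            // size of the variable's node set
// DualIt enumerates exactly those nodes; the iterator converts to Node.
for (MCA::DualIt it(mca, k); it != lemon::INVALID; ++it) {
  lemon::ListDigraph::Node n = it;
  // ... inspect n ...
}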
630 | 630 |
|
631 | 631 |
/// @} |
632 | 632 |
|
633 | 633 |
/// \name Execution control |
634 | 634 |
/// The simplest way to execute the algorithm is to use |
635 | 635 |
/// one of the member functions called \c run(...). \n |
636 | 636 |
/// If you need more control on the execution, |
637 | 637 |
/// first you must call \ref init(), then you can add several |
638 | 638 |
/// source nodes with \ref addSource(). |
639 | 639 |
/// Finally \ref start() will perform the arborescence |
640 | 640 |
/// computation. |
641 | 641 |
|
642 | 642 |
///@{ |
643 | 643 |
|
644 | 644 |
/// \brief Initializes the internal data structures. |
645 | 645 |
/// |
646 | 646 |
/// Initializes the internal data structures. |
647 | 647 |
/// |
648 | 648 |
void init() { |
649 | 649 |
createStructures(); |
650 | 650 |
_heap->clear(); |
651 | 651 |
for (NodeIt it(*_digraph); it != INVALID; ++it) { |
652 | 652 |
(*_cost_arcs)[it].arc = INVALID; |
653 |
_node_order->set(it, -3); |
|
654 |
_heap_cross_ref->set(it, Heap::PRE_HEAP); |
|
653 |
(*_node_order)[it] = -3; |
|
654 |
(*_heap_cross_ref)[it] = Heap::PRE_HEAP; |
|
655 | 655 |
_pred->set(it, INVALID); |
656 | 656 |
} |
657 | 657 |
for (ArcIt it(*_digraph); it != INVALID; ++it) { |
658 | 658 |
_arborescence->set(it, false); |
659 |
_arc_order->set(it, -1); |
|
659 |
(*_arc_order)[it] = -1; |
|
660 | 660 |
} |
661 | 661 |
_dual_node_list.clear(); |
662 | 662 |
_dual_variables.clear(); |
663 | 663 |
} |
664 | 664 |
|
665 | 665 |
/// \brief Adds a new source node. |
666 | 666 |
/// |
667 | 667 |
/// Adds a new source node to the algorithm. |
668 | 668 |
void addSource(Node source) { |
669 | 669 |
std::vector<Node> nodes; |
670 | 670 |
nodes.push_back(source); |
671 | 671 |
while (!nodes.empty()) { |
672 | 672 |
Node node = nodes.back(); |
673 | 673 |
nodes.pop_back(); |
674 | 674 |
for (OutArcIt it(*_digraph, node); it != INVALID; ++it) { |
675 | 675 |
Node target = _digraph->target(it); |
676 | 676 |
if ((*_node_order)[target] == -3) { |
677 | 677 |
(*_node_order)[target] = -2; |
678 | 678 |
nodes.push_back(target); |
679 | 679 |
queue.push_back(target); |
680 | 680 |
} |
681 | 681 |
} |
682 | 682 |
} |
683 | 683 |
(*_node_order)[source] = -1; |
684 | 684 |
} |
685 | 685 |
|
686 | 686 |
/// \brief Processes the next node in the priority queue. |
687 | 687 |
/// |
688 | 688 |
/// Processes the next node in the priority queue. |
689 | 689 |
/// |
690 | 690 |
/// \return The processed node. |
691 | 691 |
/// |
692 | 692 |
/// \warning The queue must not be empty! |
693 | 693 |
Node processNextNode() { |
694 | 694 |
Node node = queue.back(); |
695 | 695 |
queue.pop_back(); |
696 | 696 |
if ((*_node_order)[node] == -2) { |
697 | 697 |
Arc arc = prepare(node); |
698 | 698 |
Node source = _digraph->source(arc); |
699 | 699 |
while ((*_node_order)[source] != -1) { |
700 | 700 |
if ((*_node_order)[source] >= 0) { |
701 | 701 |
arc = contract(source); |
702 | 702 |
} else { |
703 | 703 |
arc = prepare(source); |
704 | 704 |
} |
705 | 705 |
source = _digraph->source(arc); |
706 | 706 |
} |
707 | 707 |
finalize(arc); |
708 | 708 |
level_stack.clear(); |
709 | 709 |
} |
710 | 710 |
return node; |
711 | 711 |
} |
712 | 712 |
|
713 | 713 |
/// \brief Returns the number of the nodes to be processed. |
714 | 714 |
/// |
715 | 715 |
/// Returns the number of the nodes to be processed. |
716 | 716 |
int queueSize() const { |
717 | 717 |
return queue.size(); |
718 | 718 |
} |
719 | 719 |
|
720 | 720 |
/// \brief Returns \c false if there are nodes to be processed. |
721 | 721 |
/// |
722 | 722 |
/// Returns \c false if there are nodes to be processed. |
723 | 723 |
bool emptyQueue() const { |
724 | 724 |
return queue.empty(); |
725 | 725 |
} |
726 | 726 |
|
727 | 727 |
/// \brief Executes the algorithm. |
728 | 728 |
/// |
729 | 729 |
/// Executes the algorithm. |
730 | 730 |
/// |
731 | 731 |
/// \pre init() must be called and at least one node should be added |
732 | 732 |
/// with addSource() before using this function. |
733 | 733 |
/// |
734 | 734 |
///\note mca.start() is just a shortcut of the following code. |
735 | 735 |
///\code |
736 | 736 |
///while (!mca.emptyQueue()) { |
737 | 737 |
/// mca.processNextNode(); |
738 | 738 |
///} |
739 | 739 |
///\endcode |
740 | 740 |
void start() { |
741 | 741 |
while (!emptyQueue()) { |
742 | 742 |
processNextNode(); |
743 | 743 |
} |
744 | 744 |
} |
745 | 745 |
|
746 | 746 |
/// \brief Runs %MinCostArborescence algorithm from node \c s. |
747 | 747 |
/// |
748 | 748 |
/// This method runs the %MinCostArborescence algorithm from |
749 | 749 |
/// a root node \c s. |
750 | 750 |
/// |
751 | 751 |
/// \note mca.run(s) is just a shortcut of the following code. |
752 | 752 |
/// \code |
753 | 753 |
/// mca.init(); |
754 | 754 |
/// mca.addSource(s); |
755 | 755 |
/// mca.start(); |
... | ... |
@@ -311,619 +311,619 @@ |
311 | 311 |
|
312 | 312 |
/// \brief Sets the flow map. |
313 | 313 |
/// |
314 | 314 |
/// Sets the flow map. |
315 | 315 |
/// If you don't use this function before calling \ref run() or |
316 | 316 |
/// \ref init(), an instance will be allocated automatically. |
317 | 317 |
/// The destructor deallocates this automatically allocated map, |
318 | 318 |
/// of course. |
319 | 319 |
/// \return <tt>(*this)</tt> |
320 | 320 |
Preflow& flowMap(FlowMap& map) { |
321 | 321 |
if (_local_flow) { |
322 | 322 |
delete _flow; |
323 | 323 |
_local_flow = false; |
324 | 324 |
} |
325 | 325 |
_flow = ↦ |
326 | 326 |
return *this; |
327 | 327 |
} |
328 | 328 |
|
329 | 329 |
/// \brief Sets the source node. |
330 | 330 |
/// |
331 | 331 |
/// Sets the source node. |
332 | 332 |
/// \return <tt>(*this)</tt> |
333 | 333 |
Preflow& source(const Node& node) { |
334 | 334 |
_source = node; |
335 | 335 |
return *this; |
336 | 336 |
} |
337 | 337 |
|
338 | 338 |
/// \brief Sets the target node. |
339 | 339 |
/// |
340 | 340 |
/// Sets the target node. |
341 | 341 |
/// \return <tt>(*this)</tt> |
342 | 342 |
Preflow& target(const Node& node) { |
343 | 343 |
_target = node; |
344 | 344 |
return *this; |
345 | 345 |
} |
346 | 346 |
|
347 | 347 |
/// \brief Sets the elevator used by the algorithm. |
348 | 348 |
/// |
349 | 349 |
/// Sets the elevator used by the algorithm. |
350 | 350 |
/// If you don't use this function before calling \ref run() or |
351 | 351 |
/// \ref init(), an instance will be allocated automatically. |
352 | 352 |
/// The destructor deallocates this automatically allocated elevator, |
353 | 353 |
/// of course. |
354 | 354 |
/// \return <tt>(*this)</tt> |
355 | 355 |
Preflow& elevator(Elevator& elevator) { |
356 | 356 |
if (_local_level) { |
357 | 357 |
delete _level; |
358 | 358 |
_local_level = false; |
359 | 359 |
} |
360 | 360 |
_level = &elevator; |
361 | 361 |
return *this; |
362 | 362 |
} |
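A short sketch of supplying a user-constructed elevator through this setter instead of relying on the automatically allocated one. The digraph g, capacity map cap and nodes s, t are assumed inputs, and the helper name is made up:

#include <lemon/list_graph.h>
#include <lemon/preflow.h>
using namespace lemon;

typedef Preflow<ListDigraph, ListDigraph::ArcMap<int> > PreflowType;

void runWithOwnElevator(const ListDigraph& g,
                        const ListDigraph::ArcMap<int>& cap,
                        ListDigraph::Node s, ListDigraph::Node t) {
  // The elevator has to be dimensioned for all nodes of the digraph.
  PreflowType::Elevator elev(g, countNodes(g));
  PreflowType pf(g, cap, s, t);
  pf.elevator(elev);   // pf will use, but never deallocate, `elev`
  pf.run();
}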
363 | 363 |
|
364 | 364 |
/// \brief Returns a const reference to the elevator. |
365 | 365 |
/// |
366 | 366 |
/// Returns a const reference to the elevator. |
367 | 367 |
/// |
368 | 368 |
/// \pre Either \ref run() or \ref init() must be called before |
369 | 369 |
/// using this function. |
370 | 370 |
const Elevator& elevator() const { |
371 | 371 |
return *_level; |
372 | 372 |
} |
373 | 373 |
|
374 | 374 |
/// \brief Sets the tolerance used by the algorithm. |
375 | 375 |
/// |
376 | 376 |
/// Sets the tolerance used by the algorithm. |
377 | 377 |
Preflow& tolerance(const Tolerance& tolerance) { |
378 | 378 |
_tolerance = tolerance; |
379 | 379 |
return *this; |
380 | 380 |
} |
381 | 381 |
|
382 | 382 |
/// \brief Returns a const reference to the tolerance. |
383 | 383 |
/// |
384 | 384 |
/// Returns a const reference to the tolerance. |
385 | 385 |
const Tolerance& tolerance() const { |
386 | 386 |
return _tolerance; |
387 | 387 |
} |
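A small sketch of the setter interface above with double-valued capacities: the result is written into a caller-owned flow map and a tolerance with an explicit epsilon is supplied, so tiny residual values are treated as zero. The digraph g, capacity map cap, nodes s, t and map flow are assumed to exist, and the epsilon value is arbitrary:

#include <lemon/list_graph.h>
#include <lemon/preflow.h>
using namespace lemon;

void runWithParameters(const ListDigraph& g,
                       const ListDigraph::ArcMap<double>& cap,
                       ListDigraph::Node s, ListDigraph::Node t,
                       ListDigraph::ArcMap<double>& flow) {
  Preflow<ListDigraph, ListDigraph::ArcMap<double> > pf(g, cap, s, t);
  pf.flowMap(flow)                        // write the result into `flow`
    .tolerance(Tolerance<double>(1e-10)); // non-default epsilon
  pf.run();
}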
388 | 388 |
|
389 | 389 |
/// \name Execution Control |
390 | 390 |
/// The simplest way to execute the preflow algorithm is to use |
391 | 391 |
/// \ref run() or \ref runMinCut().\n |
392 | 392 |
/// If you need more control on the initial solution or the execution, |
393 | 393 |
/// first you have to call one of the \ref init() functions, then |
394 | 394 |
/// \ref startFirstPhase() and if you need it \ref startSecondPhase(). |
395 | 395 |
|
396 | 396 |
///@{ |
397 | 397 |
|
398 | 398 |
/// \brief Initializes the internal data structures. |
399 | 399 |
/// |
400 | 400 |
/// Initializes the internal data structures and sets the initial |
401 | 401 |
/// flow to zero on each arc. |
402 | 402 |
void init() { |
403 | 403 |
createStructures(); |
404 | 404 |
|
405 | 405 |
_phase = true; |
406 | 406 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
407 |
_excess |
|
407 |
(*_excess)[n] = 0; |
|
408 | 408 |
} |
409 | 409 |
|
410 | 410 |
for (ArcIt e(_graph); e != INVALID; ++e) { |
411 | 411 |
_flow->set(e, 0); |
412 | 412 |
} |
413 | 413 |
|
414 | 414 |
typename Digraph::template NodeMap<bool> reached(_graph, false); |
415 | 415 |
|
416 | 416 |
_level->initStart(); |
417 | 417 |
_level->initAddItem(_target); |
418 | 418 |
|
419 | 419 |
std::vector<Node> queue; |
420 |
reached |
|
420 |
reached[_source] = true; |
|
421 | 421 |
|
422 | 422 |
queue.push_back(_target); |
423 |
reached |
|
423 |
reached[_target] = true; |
|
424 | 424 |
while (!queue.empty()) { |
425 | 425 |
_level->initNewLevel(); |
426 | 426 |
std::vector<Node> nqueue; |
427 | 427 |
for (int i = 0; i < int(queue.size()); ++i) { |
428 | 428 |
Node n = queue[i]; |
429 | 429 |
for (InArcIt e(_graph, n); e != INVALID; ++e) { |
430 | 430 |
Node u = _graph.source(e); |
431 | 431 |
if (!reached[u] && _tolerance.positive((*_capacity)[e])) { |
432 |
reached |
|
432 |
reached[u] = true; |
|
433 | 433 |
_level->initAddItem(u); |
434 | 434 |
nqueue.push_back(u); |
435 | 435 |
} |
436 | 436 |
} |
437 | 437 |
} |
438 | 438 |
queue.swap(nqueue); |
439 | 439 |
} |
440 | 440 |
_level->initFinish(); |
441 | 441 |
|
442 | 442 |
for (OutArcIt e(_graph, _source); e != INVALID; ++e) { |
443 | 443 |
if (_tolerance.positive((*_capacity)[e])) { |
444 | 444 |
Node u = _graph.target(e); |
445 | 445 |
if ((*_level)[u] == _level->maxLevel()) continue; |
446 | 446 |
_flow->set(e, (*_capacity)[e]); |
447 |
|
|
447 |
(*_excess)[u] += (*_capacity)[e]; |
|
448 | 448 |
if (u != _target && !_level->active(u)) { |
449 | 449 |
_level->activate(u); |
450 | 450 |
} |
451 | 451 |
} |
452 | 452 |
} |
453 | 453 |
} |
454 | 454 |
|
455 | 455 |
/// \brief Initializes the internal data structures using the |
456 | 456 |
/// given flow map. |
457 | 457 |
/// |
458 | 458 |
/// Initializes the internal data structures and sets the initial |
459 | 459 |
/// flow to the given \c flowMap. The \c flowMap should contain a |
460 | 460 |
/// flow or at least a preflow, i.e. at each node excluding the |
461 | 461 |
/// source node the incoming flow should be greater than or equal to the |
462 | 462 |
/// outgoing flow. |
463 | 463 |
/// \return \c false if the given \c flowMap is not a preflow. |
464 | 464 |
template <typename FlowMap> |
465 | 465 |
bool init(const FlowMap& flowMap) { |
466 | 466 |
createStructures(); |
467 | 467 |
|
468 | 468 |
for (ArcIt e(_graph); e != INVALID; ++e) { |
469 | 469 |
_flow->set(e, flowMap[e]); |
470 | 470 |
} |
471 | 471 |
|
472 | 472 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
473 | 473 |
Value excess = 0; |
474 | 474 |
for (InArcIt e(_graph, n); e != INVALID; ++e) { |
475 | 475 |
excess += (*_flow)[e]; |
476 | 476 |
} |
477 | 477 |
for (OutArcIt e(_graph, n); e != INVALID; ++e) { |
478 | 478 |
excess -= (*_flow)[e]; |
479 | 479 |
} |
480 | 480 |
if (excess < 0 && n != _source) return false; |
481 |
_excess |
|
481 |
(*_excess)[n] = excess; |
|
482 | 482 |
} |
483 | 483 |
|
484 | 484 |
typename Digraph::template NodeMap<bool> reached(_graph, false); |
485 | 485 |
|
486 | 486 |
_level->initStart(); |
487 | 487 |
_level->initAddItem(_target); |
488 | 488 |
|
489 | 489 |
std::vector<Node> queue; |
490 |
reached |
|
490 |
reached[_source] = true; |
|
491 | 491 |
|
492 | 492 |
queue.push_back(_target); |
493 |
reached |
|
493 |
reached[_target] = true; |
|
494 | 494 |
while (!queue.empty()) { |
495 | 495 |
_level->initNewLevel(); |
496 | 496 |
std::vector<Node> nqueue; |
497 | 497 |
for (int i = 0; i < int(queue.size()); ++i) { |
498 | 498 |
Node n = queue[i]; |
499 | 499 |
for (InArcIt e(_graph, n); e != INVALID; ++e) { |
500 | 500 |
Node u = _graph.source(e); |
501 | 501 |
if (!reached[u] && |
502 | 502 |
_tolerance.positive((*_capacity)[e] - (*_flow)[e])) { |
503 |
reached |
|
503 |
reached[u] = true; |
|
504 | 504 |
_level->initAddItem(u); |
505 | 505 |
nqueue.push_back(u); |
506 | 506 |
} |
507 | 507 |
} |
508 | 508 |
for (OutArcIt e(_graph, n); e != INVALID; ++e) { |
509 | 509 |
Node v = _graph.target(e); |
510 | 510 |
if (!reached[v] && _tolerance.positive((*_flow)[e])) { |
511 |
reached |
|
511 |
reached[v] = true; |
|
512 | 512 |
_level->initAddItem(v); |
513 | 513 |
nqueue.push_back(v); |
514 | 514 |
} |
515 | 515 |
} |
516 | 516 |
} |
517 | 517 |
queue.swap(nqueue); |
518 | 518 |
} |
519 | 519 |
_level->initFinish(); |
520 | 520 |
|
521 | 521 |
for (OutArcIt e(_graph, _source); e != INVALID; ++e) { |
522 | 522 |
Value rem = (*_capacity)[e] - (*_flow)[e]; |
523 | 523 |
if (_tolerance.positive(rem)) { |
524 | 524 |
Node u = _graph.target(e); |
525 | 525 |
if ((*_level)[u] == _level->maxLevel()) continue; |
526 | 526 |
_flow->set(e, (*_capacity)[e]); |
527 |
|
|
527 |
(*_excess)[u] += rem; |
|
528 | 528 |
if (u != _target && !_level->active(u)) { |
529 | 529 |
_level->activate(u); |
530 | 530 |
} |
531 | 531 |
} |
532 | 532 |
} |
533 | 533 |
for (InArcIt e(_graph, _source); e != INVALID; ++e) { |
534 | 534 |
Value rem = (*_flow)[e]; |
535 | 535 |
if (_tolerance.positive(rem)) { |
536 | 536 |
Node v = _graph.source(e); |
537 | 537 |
if ((*_level)[v] == _level->maxLevel()) continue; |
538 | 538 |
_flow->set(e, 0); |
539 |
|
|
539 |
(*_excess)[v] += rem; |
|
540 | 540 |
if (v != _target && !_level->active(v)) { |
541 | 541 |
_level->activate(v); |
542 | 542 |
} |
543 | 543 |
} |
544 | 544 |
} |
545 | 545 |
return true; |
546 | 546 |
} |
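A short sketch of warm-starting the computation from a previously computed (pre)flow via this overload of init(). The Preflow instance pf and the arc map startFlow are assumed to be provided by the caller; the helper name is illustrative:

// `pf` is an already constructed Preflow instance and `startFlow` is an
// arc map that is expected to hold at least a preflow (illustrative names).
template <typename PF, typename FM>
void warmStart(PF& pf, const FM& startFlow) {
  if (pf.init(startFlow)) {     // false if `startFlow` is not a preflow
    pf.startFirstPhase();
    pf.startSecondPhase();
  } else {
    pf.run();                   // fall back to the zero-flow init()
  }
}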
547 | 547 |
|
548 | 548 |
/// \brief Starts the first phase of the preflow algorithm. |
549 | 549 |
/// |
550 | 550 |
/// The preflow algorithm consists of two phases; this method runs |
551 | 551 |
/// the first phase. After the first phase the maximum flow value |
552 | 552 |
/// and a minimum cut can already be computed, although a |
553 | 553 |
/// maximum flow is not yet obtained. So after calling this method |
554 | 554 |
/// \ref flowValue() returns the value of a maximum flow and \ref |
555 | 555 |
/// minCut() returns a minimum cut. |
556 | 556 |
/// \pre One of the \ref init() functions must be called before |
557 | 557 |
/// using this function. |
558 | 558 |
void startFirstPhase() { |
559 | 559 |
_phase = true; |
560 | 560 |
|
561 | 561 |
Node n = _level->highestActive(); |
562 | 562 |
int level = _level->highestActiveLevel(); |
563 | 563 |
while (n != INVALID) { |
564 | 564 |
int num = _node_num; |
565 | 565 |
|
566 | 566 |
while (num > 0 && n != INVALID) { |
567 | 567 |
Value excess = (*_excess)[n]; |
568 | 568 |
int new_level = _level->maxLevel(); |
569 | 569 |
|
570 | 570 |
for (OutArcIt e(_graph, n); e != INVALID; ++e) { |
571 | 571 |
Value rem = (*_capacity)[e] - (*_flow)[e]; |
572 | 572 |
if (!_tolerance.positive(rem)) continue; |
573 | 573 |
Node v = _graph.target(e); |
574 | 574 |
if ((*_level)[v] < level) { |
575 | 575 |
if (!_level->active(v) && v != _target) { |
576 | 576 |
_level->activate(v); |
577 | 577 |
} |
578 | 578 |
if (!_tolerance.less(rem, excess)) { |
579 | 579 |
_flow->set(e, (*_flow)[e] + excess); |
580 |
|
|
580 |
(*_excess)[v] += excess; |
|
581 | 581 |
excess = 0; |
582 | 582 |
goto no_more_push_1; |
583 | 583 |
} else { |
584 | 584 |
excess -= rem; |
585 |
|
|
585 |
(*_excess)[v] += rem; |
|
586 | 586 |
_flow->set(e, (*_capacity)[e]); |
587 | 587 |
} |
588 | 588 |
} else if (new_level > (*_level)[v]) { |
589 | 589 |
new_level = (*_level)[v]; |
590 | 590 |
} |
591 | 591 |
} |
592 | 592 |
|
593 | 593 |
for (InArcIt e(_graph, n); e != INVALID; ++e) { |
594 | 594 |
Value rem = (*_flow)[e]; |
595 | 595 |
if (!_tolerance.positive(rem)) continue; |
596 | 596 |
Node v = _graph.source(e); |
597 | 597 |
if ((*_level)[v] < level) { |
598 | 598 |
if (!_level->active(v) && v != _target) { |
599 | 599 |
_level->activate(v); |
600 | 600 |
} |
601 | 601 |
if (!_tolerance.less(rem, excess)) { |
602 | 602 |
_flow->set(e, (*_flow)[e] - excess); |
603 |
|
|
603 |
(*_excess)[v] += excess; |
|
604 | 604 |
excess = 0; |
605 | 605 |
goto no_more_push_1; |
606 | 606 |
} else { |
607 | 607 |
excess -= rem; |
608 |
|
|
608 |
(*_excess)[v] += rem; |
|
609 | 609 |
_flow->set(e, 0); |
610 | 610 |
} |
611 | 611 |
} else if (new_level > (*_level)[v]) { |
612 | 612 |
new_level = (*_level)[v]; |
613 | 613 |
} |
614 | 614 |
} |
615 | 615 |
|
616 | 616 |
no_more_push_1: |
617 | 617 |
|
618 |
_excess |
|
618 |
(*_excess)[n] = excess; |
|
619 | 619 |
|
620 | 620 |
if (excess != 0) { |
621 | 621 |
if (new_level + 1 < _level->maxLevel()) { |
622 | 622 |
_level->liftHighestActive(new_level + 1); |
623 | 623 |
} else { |
624 | 624 |
_level->liftHighestActiveToTop(); |
625 | 625 |
} |
626 | 626 |
if (_level->emptyLevel(level)) { |
627 | 627 |
_level->liftToTop(level); |
628 | 628 |
} |
629 | 629 |
} else { |
630 | 630 |
_level->deactivate(n); |
631 | 631 |
} |
632 | 632 |
|
633 | 633 |
n = _level->highestActive(); |
634 | 634 |
level = _level->highestActiveLevel(); |
635 | 635 |
--num; |
636 | 636 |
} |
637 | 637 |
|
638 | 638 |
num = _node_num * 20; |
639 | 639 |
while (num > 0 && n != INVALID) { |
640 | 640 |
Value excess = (*_excess)[n]; |
641 | 641 |
int new_level = _level->maxLevel(); |
642 | 642 |
|
643 | 643 |
for (OutArcIt e(_graph, n); e != INVALID; ++e) { |
644 | 644 |
Value rem = (*_capacity)[e] - (*_flow)[e]; |
645 | 645 |
if (!_tolerance.positive(rem)) continue; |
646 | 646 |
Node v = _graph.target(e); |
647 | 647 |
if ((*_level)[v] < level) { |
648 | 648 |
if (!_level->active(v) && v != _target) { |
649 | 649 |
_level->activate(v); |
650 | 650 |
} |
651 | 651 |
if (!_tolerance.less(rem, excess)) { |
652 | 652 |
_flow->set(e, (*_flow)[e] + excess); |
653 |
|
|
653 |
(*_excess)[v] += excess; |
|
654 | 654 |
excess = 0; |
655 | 655 |
goto no_more_push_2; |
656 | 656 |
} else { |
657 | 657 |
excess -= rem; |
658 |
|
|
658 |
(*_excess)[v] += rem; |
|
659 | 659 |
_flow->set(e, (*_capacity)[e]); |
660 | 660 |
} |
661 | 661 |
} else if (new_level > (*_level)[v]) { |
662 | 662 |
new_level = (*_level)[v]; |
663 | 663 |
} |
664 | 664 |
} |
665 | 665 |
|
666 | 666 |
for (InArcIt e(_graph, n); e != INVALID; ++e) { |
667 | 667 |
Value rem = (*_flow)[e]; |
668 | 668 |
if (!_tolerance.positive(rem)) continue; |
669 | 669 |
Node v = _graph.source(e); |
670 | 670 |
if ((*_level)[v] < level) { |
671 | 671 |
if (!_level->active(v) && v != _target) { |
672 | 672 |
_level->activate(v); |
673 | 673 |
} |
674 | 674 |
if (!_tolerance.less(rem, excess)) { |
675 | 675 |
_flow->set(e, (*_flow)[e] - excess); |
676 |
|
|
676 |
(*_excess)[v] += excess; |
|
677 | 677 |
excess = 0; |
678 | 678 |
goto no_more_push_2; |
679 | 679 |
} else { |
680 | 680 |
excess -= rem; |
681 |
|
|
681 |
(*_excess)[v] += rem; |
|
682 | 682 |
_flow->set(e, 0); |
683 | 683 |
} |
684 | 684 |
} else if (new_level > (*_level)[v]) { |
685 | 685 |
new_level = (*_level)[v]; |
686 | 686 |
} |
687 | 687 |
} |
688 | 688 |
|
689 | 689 |
no_more_push_2: |
690 | 690 |
|
691 |
_excess |
|
691 |
(*_excess)[n] = excess; |
|
692 | 692 |
|
693 | 693 |
if (excess != 0) { |
694 | 694 |
if (new_level + 1 < _level->maxLevel()) { |
695 | 695 |
_level->liftActiveOn(level, new_level + 1); |
696 | 696 |
} else { |
697 | 697 |
_level->liftActiveToTop(level); |
698 | 698 |
} |
699 | 699 |
if (_level->emptyLevel(level)) { |
700 | 700 |
_level->liftToTop(level); |
701 | 701 |
} |
702 | 702 |
} else { |
703 | 703 |
_level->deactivate(n); |
704 | 704 |
} |
705 | 705 |
|
706 | 706 |
while (level >= 0 && _level->activeFree(level)) { |
707 | 707 |
--level; |
708 | 708 |
} |
709 | 709 |
if (level == -1) { |
710 | 710 |
n = _level->highestActive(); |
711 | 711 |
level = _level->highestActiveLevel(); |
712 | 712 |
} else { |
713 | 713 |
n = _level->activeOn(level); |
714 | 714 |
} |
715 | 715 |
--num; |
716 | 716 |
} |
717 | 717 |
} |
718 | 718 |
} |
719 | 719 |
|
720 | 720 |
/// \brief Starts the second phase of the preflow algorithm. |
721 | 721 |
/// |
722 | 722 |
/// The preflow algorithm consists of two phases; this method runs |
723 | 723 |
/// the second phase. After calling one of the \ref init() functions |
724 | 724 |
/// and \ref startFirstPhase() and then \ref startSecondPhase(), |
725 | 725 |
/// \ref flowMap() returns a maximum flow, \ref flowValue() returns the |
726 | 726 |
/// value of a maximum flow, and \ref minCut() returns a minimum cut. |
727 | 727 |
/// \pre One of the \ref init() functions and \ref startFirstPhase() |
728 | 728 |
/// must be called before using this function. |
729 | 729 |
void startSecondPhase() { |
730 | 730 |
_phase = false; |
731 | 731 |
|
732 | 732 |
typename Digraph::template NodeMap<bool> reached(_graph); |
733 | 733 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
734 |
reached |
|
734 |
reached[n] = (*_level)[n] < _level->maxLevel(); |
|
735 | 735 |
} |
736 | 736 |
|
737 | 737 |
_level->initStart(); |
738 | 738 |
_level->initAddItem(_source); |
739 | 739 |
|
740 | 740 |
std::vector<Node> queue; |
741 | 741 |
queue.push_back(_source); |
742 |
reached |
|
742 |
reached[_source] = true; |
|
743 | 743 |
|
744 | 744 |
while (!queue.empty()) { |
745 | 745 |
_level->initNewLevel(); |
746 | 746 |
std::vector<Node> nqueue; |
747 | 747 |
for (int i = 0; i < int(queue.size()); ++i) { |
748 | 748 |
Node n = queue[i]; |
749 | 749 |
for (OutArcIt e(_graph, n); e != INVALID; ++e) { |
750 | 750 |
Node v = _graph.target(e); |
751 | 751 |
if (!reached[v] && _tolerance.positive((*_flow)[e])) { |
752 |
reached |
|
752 |
reached[v] = true; |
|
753 | 753 |
_level->initAddItem(v); |
754 | 754 |
nqueue.push_back(v); |
755 | 755 |
} |
756 | 756 |
} |
757 | 757 |
for (InArcIt e(_graph, n); e != INVALID; ++e) { |
758 | 758 |
Node u = _graph.source(e); |
759 | 759 |
if (!reached[u] && |
760 | 760 |
_tolerance.positive((*_capacity)[e] - (*_flow)[e])) { |
761 |
reached |
|
761 |
reached[u] = true; |
|
762 | 762 |
_level->initAddItem(u); |
763 | 763 |
nqueue.push_back(u); |
764 | 764 |
} |
765 | 765 |
} |
766 | 766 |
} |
767 | 767 |
queue.swap(nqueue); |
768 | 768 |
} |
769 | 769 |
_level->initFinish(); |
770 | 770 |
|
771 | 771 |
for (NodeIt n(_graph); n != INVALID; ++n) { |
772 | 772 |
if (!reached[n]) { |
773 | 773 |
_level->dirtyTopButOne(n); |
774 | 774 |
} else if ((*_excess)[n] > 0 && _target != n) { |
775 | 775 |
_level->activate(n); |
776 | 776 |
} |
777 | 777 |
} |
778 | 778 |
|
779 | 779 |
Node n; |
780 | 780 |
while ((n = _level->highestActive()) != INVALID) { |
781 | 781 |
Value excess = (*_excess)[n]; |
782 | 782 |
int level = _level->highestActiveLevel(); |
783 | 783 |
int new_level = _level->maxLevel(); |
784 | 784 |
|
785 | 785 |
for (OutArcIt e(_graph, n); e != INVALID; ++e) { |
786 | 786 |
Value rem = (*_capacity)[e] - (*_flow)[e]; |
787 | 787 |
if (!_tolerance.positive(rem)) continue; |
788 | 788 |
Node v = _graph.target(e); |
789 | 789 |
if ((*_level)[v] < level) { |
790 | 790 |
if (!_level->active(v) && v != _source) { |
791 | 791 |
_level->activate(v); |
792 | 792 |
} |
793 | 793 |
if (!_tolerance.less(rem, excess)) { |
794 | 794 |
_flow->set(e, (*_flow)[e] + excess); |
795 |
|
|
795 |
(*_excess)[v] += excess; |
|
796 | 796 |
excess = 0; |
797 | 797 |
goto no_more_push; |
798 | 798 |
} else { |
799 | 799 |
excess -= rem; |
800 |
|
|
800 |
(*_excess)[v] += rem; |
|
801 | 801 |
_flow->set(e, (*_capacity)[e]); |
802 | 802 |
} |
803 | 803 |
} else if (new_level > (*_level)[v]) { |
804 | 804 |
new_level = (*_level)[v]; |
805 | 805 |
} |
806 | 806 |
} |
807 | 807 |
|
808 | 808 |
for (InArcIt e(_graph, n); e != INVALID; ++e) { |
809 | 809 |
Value rem = (*_flow)[e]; |
810 | 810 |
if (!_tolerance.positive(rem)) continue; |
811 | 811 |
Node v = _graph.source(e); |
812 | 812 |
if ((*_level)[v] < level) { |
813 | 813 |
if (!_level->active(v) && v != _source) { |
814 | 814 |
_level->activate(v); |
815 | 815 |
} |
816 | 816 |
if (!_tolerance.less(rem, excess)) { |
817 | 817 |
_flow->set(e, (*_flow)[e] - excess); |
818 |
|
|
818 |
(*_excess)[v] += excess; |
|
819 | 819 |
excess = 0; |
820 | 820 |
goto no_more_push; |
821 | 821 |
} else { |
822 | 822 |
excess -= rem; |
823 |
|
|
823 |
(*_excess)[v] += rem; |
|
824 | 824 |
_flow->set(e, 0); |
825 | 825 |
} |
826 | 826 |
} else if (new_level > (*_level)[v]) { |
827 | 827 |
new_level = (*_level)[v]; |
828 | 828 |
} |
829 | 829 |
} |
830 | 830 |
|
831 | 831 |
no_more_push: |
832 | 832 |
|
833 |
_excess |
|
833 |
(*_excess)[n] = excess; |
|
834 | 834 |
|
835 | 835 |
if (excess != 0) { |
836 | 836 |
if (new_level + 1 < _level->maxLevel()) { |
837 | 837 |
_level->liftHighestActive(new_level + 1); |
838 | 838 |
} else { |
839 | 839 |
// Calculation error |
840 | 840 |
_level->liftHighestActiveToTop(); |
841 | 841 |
} |
842 | 842 |
if (_level->emptyLevel(level)) { |
843 | 843 |
// Calculation error |
844 | 844 |
_level->liftToTop(level); |
845 | 845 |
} |
846 | 846 |
} else { |
847 | 847 |
_level->deactivate(n); |
848 | 848 |
} |
849 | 849 |
|
850 | 850 |
} |
851 | 851 |
} |
852 | 852 |
|
853 | 853 |
/// \brief Runs the preflow algorithm. |
854 | 854 |
/// |
855 | 855 |
/// Runs the preflow algorithm. |
856 | 856 |
/// \note pf.run() is just a shortcut of the following code. |
857 | 857 |
/// \code |
858 | 858 |
/// pf.init(); |
859 | 859 |
/// pf.startFirstPhase(); |
860 | 860 |
/// pf.startSecondPhase(); |
861 | 861 |
/// \endcode |
862 | 862 |
void run() { |
863 | 863 |
init(); |
864 | 864 |
startFirstPhase(); |
865 | 865 |
startSecondPhase(); |
866 | 866 |
} |
867 | 867 |
|
868 | 868 |
/// \brief Runs the preflow algorithm to compute the minimum cut. |
869 | 869 |
/// |
870 | 870 |
/// Runs the preflow algorithm to compute the minimum cut. |
871 | 871 |
/// \note pf.runMinCut() is just a shortcut of the following code. |
872 | 872 |
/// \code |
873 | 873 |
/// pf.init(); |
874 | 874 |
/// pf.startFirstPhase(); |
875 | 875 |
/// \endcode |
876 | 876 |
void runMinCut() { |
877 | 877 |
init(); |
878 | 878 |
startFirstPhase(); |
879 | 879 |
} |
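A sketch of a pure minimum cut computation built on runMinCut(): after the first phase, flowValue() and the cut queries are already valid. The digraph g, capacity map cap and nodes s, t are assumed inputs, and the helper name is made up:

#include <iostream>
#include <lemon/list_graph.h>
#include <lemon/preflow.h>
using namespace lemon;

void reportMinCut(const ListDigraph& g,
                  const ListDigraph::ArcMap<int>& cap,
                  ListDigraph::Node s, ListDigraph::Node t) {
  Preflow<ListDigraph> pf(g, cap, s, t);
  pf.runMinCut();                       // init() + startFirstPhase()
  std::cout << "min cut value: " << pf.flowValue() << std::endl;

  // Collect the source side of a minimum cut into a bool node map.
  ListDigraph::NodeMap<bool> cut(g);
  pf.minCutMap(cut);
  for (ListDigraph::NodeIt n(g); n != INVALID; ++n) {
    if (cut[n]) std::cout << "node " << g.id(n) << " is on the source side\n";
  }
}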
880 | 880 |
|
881 | 881 |
/// @} |
882 | 882 |
|
883 | 883 |
/// \name Query Functions |
884 | 884 |
/// The results of the preflow algorithm can be obtained using these |
885 | 885 |
/// functions.\n |
886 | 886 |
/// Either one of the \ref run() "run*()" functions or one of the |
887 | 887 |
/// \ref startFirstPhase() "start*()" functions should be called |
888 | 888 |
/// before using them. |
889 | 889 |
|
890 | 890 |
///@{ |
891 | 891 |
|
892 | 892 |
/// \brief Returns the value of the maximum flow. |
893 | 893 |
/// |
894 | 894 |
/// Returns the value of the maximum flow by returning the excess |
895 | 895 |
/// of the target node. This value equals the value of |
896 | 896 |
/// the maximum flow already after the first phase of the algorithm. |
897 | 897 |
/// |
898 | 898 |
/// \pre Either \ref run() or \ref init() must be called before |
899 | 899 |
/// using this function. |
900 | 900 |
Value flowValue() const { |
901 | 901 |
return (*_excess)[_target]; |
902 | 902 |
} |
903 | 903 |
|
904 | 904 |
/// \brief Returns the flow on the given arc. |
905 | 905 |
/// |
906 | 906 |
/// Returns the flow on the given arc. This method can |
907 | 907 |
/// be called after the second phase of the algorithm. |
908 | 908 |
/// |
909 | 909 |
/// \pre Either \ref run() or \ref init() must be called before |
910 | 910 |
/// using this function. |
911 | 911 |
Value flow(const Arc& arc) const { |
912 | 912 |
return (*_flow)[arc]; |
913 | 913 |
} |
914 | 914 |
|
915 | 915 |
/// \brief Returns a const reference to the flow map. |
916 | 916 |
/// |
917 | 917 |
/// Returns a const reference to the arc map storing the found flow. |
918 | 918 |
/// This method can be called after the second phase of the algorithm. |
919 | 919 |
/// |
920 | 920 |
/// \pre Either \ref run() or \ref init() must be called before |
921 | 921 |
/// using this function. |
922 | 922 |
const FlowMap& flowMap() const { |
923 | 923 |
return *_flow; |
924 | 924 |
} |
925 | 925 |
|
926 | 926 |
/// \brief Returns \c true when the node is on the source side of the |
927 | 927 |
/// minimum cut. |
928 | 928 |
/// |
929 | 929 |
/// Returns true when the node is on the source side of the found |
... | ... |
@@ -6,142 +6,142 @@ |
6 | 6 |
* Egervary Jeno Kombinatorikus Optimalizalasi Kutatocsoport |
7 | 7 |
* (Egervary Research Group on Combinatorial Optimization, EGRES). |
8 | 8 |
* |
9 | 9 |
* Permission to use, modify and distribute this software is granted |
10 | 10 |
* provided that this copyright notice appears in all copies. For |
11 | 11 |
* precise terms see the accompanying LICENSE file. |
12 | 12 |
* |
13 | 13 |
* This software is provided "AS IS" with no warranty of any kind, |
14 | 14 |
* express or implied, and with no claim as to its suitability for any |
15 | 15 |
* purpose. |
16 | 16 |
* |
17 | 17 |
*/ |
18 | 18 |
|
19 | 19 |
#include <iostream> |
20 | 20 |
#include <vector> |
21 | 21 |
|
22 | 22 |
#include "test_tools.h" |
23 | 23 |
#include <lemon/maps.h> |
24 | 24 |
#include <lemon/kruskal.h> |
25 | 25 |
#include <lemon/list_graph.h> |
26 | 26 |
|
27 | 27 |
#include <lemon/concepts/maps.h> |
28 | 28 |
#include <lemon/concepts/digraph.h> |
29 | 29 |
#include <lemon/concepts/graph.h> |
30 | 30 |
|
31 | 31 |
using namespace std; |
32 | 32 |
using namespace lemon; |
33 | 33 |
|
34 | 34 |
void checkCompileKruskal() |
35 | 35 |
{ |
36 | 36 |
concepts::WriteMap<concepts::Digraph::Arc,bool> w; |
37 | 37 |
concepts::WriteMap<concepts::Graph::Edge,bool> uw; |
38 | 38 |
|
39 | 39 |
concepts::ReadMap<concepts::Digraph::Arc,int> r; |
40 | 40 |
concepts::ReadMap<concepts::Graph::Edge,int> ur; |
41 | 41 |
|
42 | 42 |
concepts::Digraph g; |
43 | 43 |
concepts::Graph ug; |
44 | 44 |
|
45 | 45 |
kruskal(g, r, w); |
46 | 46 |
kruskal(ug, ur, uw); |
47 | 47 |
|
48 | 48 |
std::vector<std::pair<concepts::Digraph::Arc, int> > rs; |
49 | 49 |
std::vector<std::pair<concepts::Graph::Edge, int> > urs; |
50 | 50 |
|
51 | 51 |
kruskal(g, rs, w); |
52 | 52 |
kruskal(ug, urs, uw); |
53 | 53 |
|
54 | 54 |
std::vector<concepts::Digraph::Arc> ws; |
55 | 55 |
std::vector<concepts::Graph::Edge> uws; |
56 | 56 |
|
57 | 57 |
kruskal(g, r, ws.begin()); |
58 | 58 |
kruskal(ug, ur, uws.begin()); |
59 | 59 |
} |
60 | 60 |
|
61 | 61 |
int main() { |
62 | 62 |
|
63 | 63 |
typedef ListGraph::Node Node; |
64 | 64 |
typedef ListGraph::Edge Edge; |
65 | 65 |
typedef ListGraph::NodeIt NodeIt; |
66 | 66 |
typedef ListGraph::ArcIt ArcIt; |
67 | 67 |
|
68 | 68 |
ListGraph G; |
69 | 69 |
|
70 | 70 |
Node s=G.addNode(); |
71 | 71 |
Node v1=G.addNode(); |
72 | 72 |
Node v2=G.addNode(); |
73 | 73 |
Node v3=G.addNode(); |
74 | 74 |
Node v4=G.addNode(); |
75 | 75 |
Node t=G.addNode(); |
76 | 76 |
|
77 | 77 |
Edge e1 = G.addEdge(s, v1); |
78 | 78 |
Edge e2 = G.addEdge(s, v2); |
79 | 79 |
Edge e3 = G.addEdge(v1, v2); |
80 | 80 |
Edge e4 = G.addEdge(v2, v1); |
81 | 81 |
Edge e5 = G.addEdge(v1, v3); |
82 | 82 |
Edge e6 = G.addEdge(v3, v2); |
83 | 83 |
Edge e7 = G.addEdge(v2, v4); |
84 | 84 |
Edge e8 = G.addEdge(v4, v3); |
85 | 85 |
Edge e9 = G.addEdge(v3, t); |
86 | 86 |
Edge e10 = G.addEdge(v4, t); |
87 | 87 |
|
88 | 88 |
typedef ListGraph::EdgeMap<int> ECostMap; |
89 | 89 |
typedef ListGraph::EdgeMap<bool> EBoolMap; |
90 | 90 |
|
91 | 91 |
ECostMap edge_cost_map(G, 2); |
92 | 92 |
EBoolMap tree_map(G); |
93 | 93 |
|
94 | 94 |
|
95 | 95 |
//Test with const map. |
96 | 96 |
check(kruskal(G, ConstMap<ListGraph::Edge,int>(2), tree_map)==10, |
97 | 97 |
"Total cost should be 10"); |
98 | 98 |
//Test with an edge map (filled with uniform costs). |
99 | 99 |
check(kruskal(G, edge_cost_map, tree_map)==10, |
100 | 100 |
"Total cost should be 10"); |
101 | 101 |
|
102 |
edge_cost_map.set(e1, -10); |
|
103 |
edge_cost_map.set(e2, -9); |
|
104 |
edge_cost_map.set(e3, -8); |
|
105 |
edge_cost_map.set(e4, -7); |
|
106 |
edge_cost_map.set(e5, -6); |
|
107 |
edge_cost_map.set(e6, -5); |
|
108 |
edge_cost_map.set(e7, -4); |
|
109 |
edge_cost_map.set(e8, -3); |
|
110 |
edge_cost_map.set(e9, -2); |
|
111 |
edge_cost_map.set(e10, -1); |
|
102 |
edge_cost_map[e1] = -10; |
|
103 |
edge_cost_map[e2] = -9; |
|
104 |
edge_cost_map[e3] = -8; |
|
105 |
edge_cost_map[e4] = -7; |
|
106 |
edge_cost_map[e5] = -6; |
|
107 |
edge_cost_map[e6] = -5; |
|
108 |
edge_cost_map[e7] = -4; |
|
109 |
edge_cost_map[e8] = -3; |
|
110 |
edge_cost_map[e9] = -2; |
|
111 |
edge_cost_map[e10] = -1; |
|
112 | 112 |
|
113 | 113 |
vector<Edge> tree_edge_vec(5); |
114 | 114 |
|
115 | 115 |
//Test with an edge map and inserter. |
116 | 116 |
check(kruskal(G, edge_cost_map, |
117 | 117 |
tree_edge_vec.begin()) |
118 | 118 |
==-31, |
119 | 119 |
"Total cost should be -31."); |
120 | 120 |
|
121 | 121 |
tree_edge_vec.clear(); |
122 | 122 |
|
123 | 123 |
check(kruskal(G, edge_cost_map, |
124 | 124 |
back_inserter(tree_edge_vec)) |
125 | 125 |
==-31, |
126 | 126 |
"Total cost should be -31."); |
127 | 127 |
|
128 | 128 |
// tree_edge_vec.clear(); |
129 | 129 |
|
130 | 130 |
// //The above test could also be coded like this: |
131 | 131 |
// check(kruskal(G, |
132 | 132 |
// makeKruskalMapInput(G, edge_cost_map), |
133 | 133 |
// makeKruskalSequenceOutput(back_inserter(tree_edge_vec))) |
134 | 134 |
// ==-31, |
135 | 135 |
// "Total cost should be -31."); |
136 | 136 |
|
137 | 137 |
check(tree_edge_vec.size()==5,"The tree should have 5 edges."); |
138 | 138 |
|
139 | 139 |
check(tree_edge_vec[0]==e1 && |
140 | 140 |
tree_edge_vec[1]==e2 && |
141 | 141 |
tree_edge_vec[2]==e5 && |
142 | 142 |
tree_edge_vec[3]==e7 && |
143 | 143 |
tree_edge_vec[4]==e9, |
144 | 144 |
"Wrong tree."); |
145 | 145 |
|
146 | 146 |
return 0; |
147 | 147 |
} |