@@ -338,995 +338,1016 @@
    /// \brief Constructor.
    ///
    /// The constructor of the class.
    ///
    /// \param graph The digraph the algorithm runs on.
    CostScaling(const GR& graph) :
      _graph(graph), _node_id(graph), _arc_idf(graph), _arc_idb(graph),
      _cost_map(_cost_vec), _pi_map(_pi),
      INF(std::numeric_limits<Value>::has_infinity ?
          std::numeric_limits<Value>::infinity() :
          std::numeric_limits<Value>::max())
    {
      // Check the number types
      LEMON_ASSERT(std::numeric_limits<Value>::is_signed,
        "The flow type of CostScaling must be signed");
      LEMON_ASSERT(std::numeric_limits<Cost>::is_signed,
        "The cost type of CostScaling must be signed");

      // Reset data structures
      reset();
    }

    /// \name Parameters
    /// The parameters of the algorithm can be specified using these
    /// functions.

    /// @{

    /// \brief Set the lower bounds on the arcs.
    ///
    /// This function sets the lower bounds on the arcs.
    /// If it is not used before calling \ref run(), the lower bounds
    /// will be set to zero on all arcs.
    ///
    /// \param map An arc map storing the lower bounds.
    /// Its \c Value type must be convertible to the \c Value type
    /// of the algorithm.
    ///
    /// \return <tt>(*this)</tt>
    template <typename LowerMap>
    CostScaling& lowerMap(const LowerMap& map) {
      _have_lower = true;
      for (ArcIt a(_graph); a != INVALID; ++a) {
        _lower[_arc_idf[a]] = map[a];
        _lower[_arc_idb[a]] = map[a];
      }
      return *this;
    }

    /// \brief Set the upper bounds (capacities) on the arcs.
    ///
    /// This function sets the upper bounds (capacities) on the arcs.
    /// If it is not used before calling \ref run(), the upper bounds
    /// will be set to \ref INF on all arcs (i.e. the flow value will be
    /// unbounded from above).
    ///
    /// \param map An arc map storing the upper bounds.
    /// Its \c Value type must be convertible to the \c Value type
    /// of the algorithm.
    ///
    /// \return <tt>(*this)</tt>
    template<typename UpperMap>
    CostScaling& upperMap(const UpperMap& map) {
      for (ArcIt a(_graph); a != INVALID; ++a) {
        _upper[_arc_idf[a]] = map[a];
      }
      return *this;
    }

    /// \brief Set the costs of the arcs.
    ///
    /// This function sets the costs of the arcs.
    /// If it is not used before calling \ref run(), the costs
    /// will be set to \c 1 on all arcs.
    ///
    /// \param map An arc map storing the costs.
    /// Its \c Value type must be convertible to the \c Cost type
    /// of the algorithm.
    ///
    /// \return <tt>(*this)</tt>
    template<typename CostMap>
    CostScaling& costMap(const CostMap& map) {
      for (ArcIt a(_graph); a != INVALID; ++a) {
        _scost[_arc_idf[a]] = map[a];
        _scost[_arc_idb[a]] = -map[a];
      }
      return *this;
    }

    /// \brief Set the supply values of the nodes.
    ///
    /// This function sets the supply values of the nodes.
    /// If neither this function nor \ref stSupply() is used before
    /// calling \ref run(), the supply of each node will be set to zero.
    ///
    /// \param map A node map storing the supply values.
    /// Its \c Value type must be convertible to the \c Value type
    /// of the algorithm.
    ///
    /// \return <tt>(*this)</tt>
    template<typename SupplyMap>
    CostScaling& supplyMap(const SupplyMap& map) {
      for (NodeIt n(_graph); n != INVALID; ++n) {
        _supply[_node_id[n]] = map[n];
      }
      return *this;
    }

    /// \brief Set single source and target nodes and a supply value.
    ///
    /// This function sets a single source node and a single target node
    /// and the required flow value.
    /// If neither this function nor \ref supplyMap() is used before
    /// calling \ref run(), the supply of each node will be set to zero.
    ///
    /// Using this function has the same effect as using \ref supplyMap()
    /// with a map in which \c k is assigned to \c s, \c -k is
    /// assigned to \c t and all other nodes have zero supply value.
    ///
    /// \param s The source node.
    /// \param t The target node.
    /// \param k The required amount of flow from node \c s to node \c t
    /// (i.e. the supply of \c s and the demand of \c t).
    ///
    /// \return <tt>(*this)</tt>
    CostScaling& stSupply(const Node& s, const Node& t, Value k) {
      for (int i = 0; i != _res_node_num; ++i) {
        _supply[i] = 0;
      }
      _supply[_node_id[s]] = k;
      _supply[_node_id[t]] = -k;
      return *this;
    }
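
    // Illustrative sketch (added for exposition, not part of this header or
    // changeset): calling stSupply(s, t, k) is equivalent to building a supply
    // map by hand and passing it to supplyMap(). It assumes a ListDigraph
    // named graph, nodes s and t, an int amount k, and an instance
    // CostScaling<ListDigraph> cs(graph).
    ListDigraph::NodeMap<int> sup(graph, 0);
    sup[s] = k;    // supply of the source node
    sup[t] = -k;   // demand of the target node
    cs.supplyMap(sup);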
471 | 471 |
|
472 | 472 |
/// @} |
473 | 473 |
|
474 | 474 |
/// \name Execution control |
475 | 475 |
/// The algorithm can be executed using \ref run(). |
476 | 476 |
|
477 | 477 |
/// @{ |
478 | 478 |
|
479 | 479 |
/// \brief Run the algorithm. |
480 | 480 |
/// |
481 | 481 |
/// This function runs the algorithm. |
482 | 482 |
/// The paramters can be specified using functions \ref lowerMap(), |
483 | 483 |
/// \ref upperMap(), \ref costMap(), \ref supplyMap(), \ref stSupply(). |
484 | 484 |
/// For example, |
485 | 485 |
/// \code |
486 | 486 |
/// CostScaling<ListDigraph> cs(graph); |
487 | 487 |
/// cs.lowerMap(lower).upperMap(upper).costMap(cost) |
488 | 488 |
/// .supplyMap(sup).run(); |
489 | 489 |
/// \endcode |
490 | 490 |
/// |
491 | 491 |
/// This function can be called more than once. All the given parameters |
492 | 492 |
/// are kept for the next call, unless \ref resetParams() or \ref reset() |
493 | 493 |
/// is used, thus only the modified parameters have to be set again. |
494 | 494 |
/// If the underlying digraph was also modified after the construction |
495 | 495 |
/// of the class (or the last \ref reset() call), then the \ref reset() |
496 | 496 |
/// function must be called. |
497 | 497 |
/// |
498 | 498 |
/// \param method The internal method that will be used in the |
499 | 499 |
/// algorithm. For more information, see \ref Method. |
500 | 500 |
/// \param factor The cost scaling factor. It must be larger than one. |
501 | 501 |
/// |
502 | 502 |
/// \return \c INFEASIBLE if no feasible flow exists, |
503 | 503 |
/// \n \c OPTIMAL if the problem has optimal solution |
504 | 504 |
/// (i.e. it is feasible and bounded), and the algorithm has found |
505 | 505 |
/// optimal flow and node potentials (primal and dual solutions), |
506 | 506 |
/// \n \c UNBOUNDED if the digraph contains an arc of negative cost |
507 | 507 |
/// and infinite upper bound. It means that the objective function |
508 | 508 |
/// is unbounded on that arc, however, note that it could actually be |
509 | 509 |
/// bounded over the feasible flows, but this algroithm cannot handle |
510 | 510 |
/// these cases. |
511 | 511 |
/// |
512 | 512 |
/// \see ProblemType, Method |
513 | 513 |
/// \see resetParams(), reset() |
514 | 514 |
ProblemType run(Method method = PARTIAL_AUGMENT, int factor = 8) { |
515 | 515 |
_alpha = factor; |
516 | 516 |
ProblemType pt = init(); |
517 | 517 |
if (pt != OPTIMAL) return pt; |
518 | 518 |
start(method); |
519 | 519 |
return OPTIMAL; |
520 | 520 |
} |
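
    // Illustrative sketch (not part of this header): a complete call sequence
    // that checks the returned ProblemType. graph, upper, cost and sup are
    // assumed to be defined by the caller, as in the example above.
    typedef CostScaling<ListDigraph> CS;
    CS cs(graph);
    cs.upperMap(upper).costMap(cost).supplyMap(sup);
    CS::ProblemType res = cs.run(CS::PARTIAL_AUGMENT);  // or CS::PUSH / CS::AUGMENT
    if (res == CS::OPTIMAL) {
      // feasible and bounded: query totalCost(), flowMap(), potentialMap()
    } else if (res == CS::INFEASIBLE) {
      // no feasible flow for the given bounds and supply values
    } else /* CS::UNBOUNDED */ {
      // an arc of negative cost and infinite upper bound was found
    }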

    /// \brief Reset all the parameters that have been given before.
    ///
    /// This function resets all the parameters that have been given
    /// before using functions \ref lowerMap(), \ref upperMap(),
    /// \ref costMap(), \ref supplyMap(), \ref stSupply().
    ///
    /// It is useful for multiple \ref run() calls. Basically, all the given
    /// parameters are kept for the next \ref run() call, unless
    /// \ref resetParams() or \ref reset() is used.
    /// If the underlying digraph was also modified after the construction
    /// of the class or the last \ref reset() call, then the \ref reset()
    /// function must be used, otherwise \ref resetParams() is sufficient.
    ///
    /// For example,
    /// \code
    ///   CostScaling<ListDigraph> cs(graph);
    ///
    ///   // First run
    ///   cs.lowerMap(lower).upperMap(upper).costMap(cost)
    ///     .supplyMap(sup).run();
    ///
    ///   // Run again with modified cost map (resetParams() is not called,
    ///   // so only the cost map has to be set again)
    ///   cost[e] += 100;
    ///   cs.costMap(cost).run();
    ///
    ///   // Run again from scratch using resetParams()
    ///   // (the lower bounds will be set to zero on all arcs)
    ///   cs.resetParams();
    ///   cs.upperMap(capacity).costMap(cost)
    ///     .supplyMap(sup).run();
    /// \endcode
    ///
    /// \return <tt>(*this)</tt>
    ///
    /// \see reset(), run()
    CostScaling& resetParams() {
      for (int i = 0; i != _res_node_num; ++i) {
        _supply[i] = 0;
      }
      int limit = _first_out[_root];
      for (int j = 0; j != limit; ++j) {
        _lower[j] = 0;
        _upper[j] = INF;
        _scost[j] = _forward[j] ? 1 : -1;
      }
      for (int j = limit; j != _res_arc_num; ++j) {
        _lower[j] = 0;
        _upper[j] = INF;
        _scost[j] = 0;
        _scost[_reverse[j]] = 0;
      }
      _have_lower = false;
      return *this;
    }

    /// \brief Reset the internal data structures and all the parameters
    /// that have been given before.
    ///
    /// This function resets the internal data structures and all the
    /// parameters that have been given before using functions \ref lowerMap(),
    /// \ref upperMap(), \ref costMap(), \ref supplyMap(), \ref stSupply().
    ///
    /// It is useful for multiple \ref run() calls. By default, all the given
    /// parameters are kept for the next \ref run() call, unless
    /// \ref resetParams() or \ref reset() is used.
    /// If the underlying digraph was also modified after the construction
    /// of the class or the last \ref reset() call, then the \ref reset()
    /// function must be used, otherwise \ref resetParams() is sufficient.
    ///
    /// See \ref resetParams() for examples.
    ///
    /// \return <tt>(*this)</tt>
    ///
    /// \see resetParams(), run()
    CostScaling& reset() {
      // Resize vectors
      _node_num = countNodes(_graph);
      _arc_num = countArcs(_graph);
      _res_node_num = _node_num + 1;
      _res_arc_num = 2 * (_arc_num + _node_num);
      _root = _node_num;

      _first_out.resize(_res_node_num + 1);
      _forward.resize(_res_arc_num);
      _source.resize(_res_arc_num);
      _target.resize(_res_arc_num);
      _reverse.resize(_res_arc_num);

      _lower.resize(_res_arc_num);
      _upper.resize(_res_arc_num);
      _scost.resize(_res_arc_num);
      _supply.resize(_res_node_num);

      _res_cap.resize(_res_arc_num);
      _cost.resize(_res_arc_num);
      _pi.resize(_res_node_num);
      _excess.resize(_res_node_num);
      _next_out.resize(_res_node_num);

      _arc_vec.reserve(_res_arc_num);
      _cost_vec.reserve(_res_arc_num);

      // Copy the graph
      int i = 0, j = 0, k = 2 * _arc_num + _node_num;
      for (NodeIt n(_graph); n != INVALID; ++n, ++i) {
        _node_id[n] = i;
      }
      i = 0;
      for (NodeIt n(_graph); n != INVALID; ++n, ++i) {
        _first_out[i] = j;
        for (OutArcIt a(_graph, n); a != INVALID; ++a, ++j) {
          _arc_idf[a] = j;
          _forward[j] = true;
          _source[j] = i;
          _target[j] = _node_id[_graph.runningNode(a)];
        }
        for (InArcIt a(_graph, n); a != INVALID; ++a, ++j) {
          _arc_idb[a] = j;
          _forward[j] = false;
          _source[j] = i;
          _target[j] = _node_id[_graph.runningNode(a)];
        }
        _forward[j] = false;
        _source[j] = i;
        _target[j] = _root;
        _reverse[j] = k;
        _forward[k] = true;
        _source[k] = _root;
        _target[k] = i;
        _reverse[k] = j;
        ++j; ++k;
      }
      _first_out[i] = j;
      _first_out[_res_node_num] = k;
      for (ArcIt a(_graph); a != INVALID; ++a) {
        int fi = _arc_idf[a];
        int bi = _arc_idb[a];
        _reverse[fi] = bi;
        _reverse[bi] = fi;
      }

      // Reset parameters
      resetParams();
      return *this;
    }

    /// @}

    /// \name Query Functions
    /// The results of the algorithm can be obtained using these
    /// functions.\n
    /// The \ref run() function must be called before using them.

    /// @{

    /// \brief Return the total cost of the found flow.
    ///
    /// This function returns the total cost of the found flow.
    /// Its complexity is O(e).
    ///
    /// \note The return type of the function can be specified as a
    /// template parameter. For example,
    /// \code
    ///   cs.totalCost<double>();
    /// \endcode
    /// It is useful if the total cost cannot be stored in the \c Cost
    /// type of the algorithm, which is the default return type of the
    /// function.
    ///
    /// \pre \ref run() must be called before using this function.
    template <typename Number>
    Number totalCost() const {
      Number c = 0;
      for (ArcIt a(_graph); a != INVALID; ++a) {
        int i = _arc_idb[a];
        c += static_cast<Number>(_res_cap[i]) *
             (-static_cast<Number>(_scost[i]));
      }
      return c;
    }

#ifndef DOXYGEN
    Cost totalCost() const {
      return totalCost<Cost>();
    }
#endif

    /// \brief Return the flow on the given arc.
    ///
    /// This function returns the flow on the given arc.
    ///
    /// \pre \ref run() must be called before using this function.
    Value flow(const Arc& a) const {
      return _res_cap[_arc_idb[a]];
    }

    /// \brief Return the flow map (the primal solution).
    ///
    /// This function copies the flow value on each arc into the given
    /// map. The \c Value type of the algorithm must be convertible to
    /// the \c Value type of the map.
    ///
    /// \pre \ref run() must be called before using this function.
    template <typename FlowMap>
    void flowMap(FlowMap &map) const {
      for (ArcIt a(_graph); a != INVALID; ++a) {
        map.set(a, _res_cap[_arc_idb[a]]);
      }
    }

    /// \brief Return the potential (dual value) of the given node.
    ///
    /// This function returns the potential (dual value) of the
    /// given node.
    ///
    /// \pre \ref run() must be called before using this function.
    Cost potential(const Node& n) const {
      return static_cast<Cost>(_pi[_node_id[n]]);
    }

    /// \brief Return the potential map (the dual solution).
    ///
    /// This function copies the potential (dual value) of each node
    /// into the given map.
    /// The \c Cost type of the algorithm must be convertible to the
    /// \c Value type of the map.
    ///
    /// \pre \ref run() must be called before using this function.
    template <typename PotentialMap>
    void potentialMap(PotentialMap &map) const {
      for (NodeIt n(_graph); n != INVALID; ++n) {
        map.set(n, static_cast<Cost>(_pi[_node_id[n]]));
      }
    }
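
    // Illustrative sketch (not part of this header): reading back the primal
    // and dual solutions after a successful run(), assuming the
    // CostScaling<ListDigraph> instance cs and the digraph graph from the
    // examples above, with Value = int and Cost = int.
    ListDigraph::ArcMap<int> flowOut(graph);
    ListDigraph::NodeMap<int> potOut(graph);
    cs.flowMap(flowOut);      // flow value of each arc (primal solution)
    cs.potentialMap(potOut);  // potential of each node (dual solution)
    long long total = cs.totalCost<long long>();  // total cost in a wider type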

    /// @}

  private:

    // Initialize the algorithm
    ProblemType init() {
      if (_res_node_num <= 1) return INFEASIBLE;

      // Check the sum of supply values
      _sum_supply = 0;
      for (int i = 0; i != _root; ++i) {
        _sum_supply += _supply[i];
      }
      if (_sum_supply > 0) return INFEASIBLE;


      // Initialize vectors
      for (int i = 0; i != _res_node_num; ++i) {
        _pi[i] = 0;
        _excess[i] = _supply[i];
      }

      // Remove infinite upper bounds and check negative arcs
      const Value MAX = std::numeric_limits<Value>::max();
      int last_out;
      if (_have_lower) {
        for (int i = 0; i != _root; ++i) {
          last_out = _first_out[i+1];
          for (int j = _first_out[i]; j != last_out; ++j) {
            if (_forward[j]) {
              Value c = _scost[j] < 0 ? _upper[j] : _lower[j];
              if (c >= MAX) return UNBOUNDED;
              _excess[i] -= c;
              _excess[_target[j]] += c;
            }
          }
        }
      } else {
        for (int i = 0; i != _root; ++i) {
          last_out = _first_out[i+1];
          for (int j = _first_out[i]; j != last_out; ++j) {
            if (_forward[j] && _scost[j] < 0) {
              Value c = _upper[j];
              if (c >= MAX) return UNBOUNDED;
              _excess[i] -= c;
              _excess[_target[j]] += c;
            }
          }
        }
      }
      Value ex, max_cap = 0;
      for (int i = 0; i != _res_node_num; ++i) {
        ex = _excess[i];
        _excess[i] = 0;
        if (ex < 0) max_cap -= ex;
      }
      for (int j = 0; j != _res_arc_num; ++j) {
        if (_upper[j] >= MAX) _upper[j] = max_cap;
      }

      // Initialize the large cost vector and the epsilon parameter
      _epsilon = 0;
      LargeCost lc;
      for (int i = 0; i != _root; ++i) {
        last_out = _first_out[i+1];
        for (int j = _first_out[i]; j != last_out; ++j) {
          lc = static_cast<LargeCost>(_scost[j]) * _res_node_num * _alpha;
          _cost[j] = lc;
          if (lc > _epsilon) _epsilon = lc;
        }
      }
      _epsilon /= _alpha;

      // Initialize maps for Circulation and remove non-zero lower bounds
      ConstMap<Arc, Value> low(0);
      typedef typename Digraph::template ArcMap<Value> ValueArcMap;
      typedef typename Digraph::template NodeMap<Value> ValueNodeMap;
      ValueArcMap cap(_graph), flow(_graph);
      ValueNodeMap sup(_graph);
      for (NodeIt n(_graph); n != INVALID; ++n) {
        sup[n] = _supply[_node_id[n]];
      }
      if (_have_lower) {
        for (ArcIt a(_graph); a != INVALID; ++a) {
          int j = _arc_idf[a];
          Value c = _lower[j];
          cap[a] = _upper[j] - c;
          sup[_graph.source(a)] -= c;
          sup[_graph.target(a)] += c;
        }
      } else {
        for (ArcIt a(_graph); a != INVALID; ++a) {
          cap[a] = _upper[_arc_idf[a]];
        }
      }

      _sup_node_num = 0;
      for (NodeIt n(_graph); n != INVALID; ++n) {
        if (sup[n] > 0) ++_sup_node_num;
      }

      // Find a feasible flow using Circulation
      Circulation<Digraph, ConstMap<Arc, Value>, ValueArcMap, ValueNodeMap>
        circ(_graph, low, cap, sup);
      if (!circ.flowMap(flow).run()) return INFEASIBLE;

      // Set residual capacities and handle GEQ supply type
      if (_sum_supply < 0) {
        for (ArcIt a(_graph); a != INVALID; ++a) {
          Value fa = flow[a];
          _res_cap[_arc_idf[a]] = cap[a] - fa;
          _res_cap[_arc_idb[a]] = fa;
          sup[_graph.source(a)] -= fa;
          sup[_graph.target(a)] += fa;
        }
        for (NodeIt n(_graph); n != INVALID; ++n) {
          _excess[_node_id[n]] = sup[n];
        }
        for (int a = _first_out[_root]; a != _res_arc_num; ++a) {
          int u = _target[a];
          int ra = _reverse[a];
          _res_cap[a] = -_sum_supply + 1;
          _res_cap[ra] = -_excess[u];
          _cost[a] = 0;
          _cost[ra] = 0;
          _excess[u] = 0;
        }
      } else {
        for (ArcIt a(_graph); a != INVALID; ++a) {
          Value fa = flow[a];
          _res_cap[_arc_idf[a]] = cap[a] - fa;
          _res_cap[_arc_idb[a]] = fa;
        }
        for (int a = _first_out[_root]; a != _res_arc_num; ++a) {
          int ra = _reverse[a];
          _res_cap[a] = 0;
          _res_cap[ra] = 0;
          _cost[a] = 0;
          _cost[ra] = 0;
        }
      }

      // Initialize data structures for buckets
      _max_rank = _alpha * _res_node_num;
      _buckets.resize(_max_rank);
      _bucket_next.resize(_res_node_num + 1);
      _bucket_prev.resize(_res_node_num + 1);
      _rank.resize(_res_node_num + 1);

      return OPTIMAL;
    }
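
    // Illustration only (not from the source): the scaled arc costs above are
    // the original costs multiplied by _res_node_num * _alpha, and _epsilon
    // starts at the largest scaled cost divided by _alpha. For example, with
    // about 1000 nodes, a maximum arc cost of 100 and the default factor 8,
    // _epsilon starts near 100 * 1001 = 100100, and the scaling loops in
    // startAugment() / startPush() divide it by _alpha after each phase:
    //   100100 -> 12512 -> 1564 -> 195 -> 24 -> 3 -> 1   (7 phases in total)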

    // Execute the algorithm and transform the results
    void start(Method method) {
      const int MAX_PARTIAL_PATH_LENGTH = 4;

      switch (method) {
        case PUSH:
          startPush();
          break;
        case AUGMENT:
          startAugment(_res_node_num - 1);
          break;
        case PARTIAL_AUGMENT:
          startAugment(MAX_PARTIAL_PATH_LENGTH);
          break;
      }

      // Compute node potentials for the original costs
      _arc_vec.clear();
      _cost_vec.clear();
      for (int j = 0; j != _res_arc_num; ++j) {
        if (_res_cap[j] > 0) {
          _arc_vec.push_back(IntPair(_source[j], _target[j]));
          _cost_vec.push_back(_scost[j]);
        }
      }
      _sgr.build(_res_node_num, _arc_vec.begin(), _arc_vec.end());

      typename BellmanFord<StaticDigraph, LargeCostArcMap>
        ::template SetDistMap<LargeCostNodeMap>::Create bf(_sgr, _cost_map);
      bf.distMap(_pi_map);
      bf.init(0);
      bf.start();

      // Handle non-zero lower bounds
      if (_have_lower) {
        int limit = _first_out[_root];
        for (int j = 0; j != limit; ++j) {
          if (!_forward[j]) _res_cap[j] += _lower[j];
        }
      }
    }

    // Initialize a cost scaling phase
    void initPhase() {
      // Saturate arcs not satisfying the optimality condition
      for (int u = 0; u != _res_node_num; ++u) {
        int last_out = _first_out[u+1];
        LargeCost pi_u = _pi[u];
        for (int a = _first_out[u]; a != last_out; ++a) {
          Value delta = _res_cap[a];
          if (delta > 0) {
            int v = _target[a];
            if (_cost[a] + pi_u - _pi[v] < 0) {
              _excess[u] -= delta;
              _excess[v] += delta;
              _res_cap[a] = 0;
              _res_cap[_reverse[a]] += delta;
            }
          }
        }
      }

      // Find active nodes (i.e. nodes with positive excess)
      for (int u = 0; u != _res_node_num; ++u) {
        if (_excess[u] > 0) _active_nodes.push_back(u);
      }

      // Initialize the next arcs
      for (int u = 0; u != _res_node_num; ++u) {
        _next_out[u] = _first_out[u];
      }
    }
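
    // Illustration only (not from the source): the quantity compared with
    // zero above is the reduced cost of a residual arc a,
    //   rcost(a) = _cost[a] + _pi[_source[a]] - _pi[_target[a]],
    // and an arc is "admissible" when rcost(a) < 0. initPhase() saturates
    // every admissible arc, so a phase starts with no residual arc of
    // negative reduced cost, and a relabel step
    // (_pi[u] -= min_red_cost + _epsilon) lowers the potential of u just
    // enough to make its cheapest outgoing residual arc admissible again.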

    // Early termination heuristic
    bool earlyTermination() {
      const double EARLY_TERM_FACTOR = 3.0;

      // Build a static residual graph
      _arc_vec.clear();
      _cost_vec.clear();
      for (int j = 0; j != _res_arc_num; ++j) {
        if (_res_cap[j] > 0) {
          _arc_vec.push_back(IntPair(_source[j], _target[j]));
          _cost_vec.push_back(_cost[j] + 1);
        }
      }
      _sgr.build(_res_node_num, _arc_vec.begin(), _arc_vec.end());

      // Run Bellman-Ford algorithm to check if the current flow is optimal
      BellmanFord<StaticDigraph, LargeCostArcMap> bf(_sgr, _cost_map);
      bf.init(0);
      bool done = false;
      int K = int(EARLY_TERM_FACTOR * std::sqrt(double(_res_node_num)));
      for (int i = 0; i < K && !done; ++i) {
        done = bf.processNextWeakRound();
      }
      return done;
    }

    // Global potential update heuristic
    void globalUpdate() {
      const int bucket_end = _root + 1;

      // Initialize buckets
      for (int r = 0; r != _max_rank; ++r) {
        _buckets[r] = bucket_end;
      }
      Value total_excess = 0;
      int b0 = bucket_end;
      for (int i = 0; i != _res_node_num; ++i) {
        if (_excess[i] < 0) {
          _rank[i] = 0;
          _bucket_next[i] = b0;
          _bucket_prev[b0] = i;
          b0 = i;
        } else {
          total_excess += _excess[i];
          _rank[i] = _max_rank;
        }
      }
      if (total_excess == 0) return;
      _buckets[0] = b0;

      // Search the buckets
      int r = 0;
      for ( ; r != _max_rank; ++r) {
        while (_buckets[r] != bucket_end) {
          // Remove the first node from the current bucket
          int u = _buckets[r];
          _buckets[r] = _bucket_next[u];

          // Search the incoming arcs of u
          LargeCost pi_u = _pi[u];
          int last_out = _first_out[u+1];
          for (int a = _first_out[u]; a != last_out; ++a) {
            int ra = _reverse[a];
            if (_res_cap[ra] > 0) {
              int v = _source[ra];
              int old_rank_v = _rank[v];
              if (r < old_rank_v) {
                // Compute the new rank of v
                LargeCost nrc = (_cost[ra] + _pi[v] - pi_u) / _epsilon;
                int new_rank_v = old_rank_v;
                if (nrc < LargeCost(_max_rank)) {
                  new_rank_v = r + 1 + static_cast<int>(nrc);
                }

                // Change the rank of v
                if (new_rank_v < old_rank_v) {
                  _rank[v] = new_rank_v;
                  _next_out[v] = _first_out[v];

                  // Remove v from its old bucket
                  if (old_rank_v < _max_rank) {
                    if (_buckets[old_rank_v] == v) {
                      _buckets[old_rank_v] = _bucket_next[v];
                    } else {
                      int pv = _bucket_prev[v], nv = _bucket_next[v];
                      _bucket_next[pv] = nv;
                      _bucket_prev[nv] = pv;
                    }
                  }

                  // Insert v into its new bucket
                  int nv = _buckets[new_rank_v];
                  _bucket_next[v] = nv;
                  _bucket_prev[nv] = v;
                  _buckets[new_rank_v] = v;
                }
              }
            }
          }

          // Finish search if there are no more active nodes
          if (_excess[u] > 0) {
            total_excess -= _excess[u];
            if (total_excess <= 0) break;
          }
        }
        if (total_excess <= 0) break;
      }

      // Relabel nodes
      for (int u = 0; u != _res_node_num; ++u) {
        int k = std::min(_rank[u], r);
        if (k > 0) {
          _pi[u] -= _epsilon * k;
          _next_out[u] = _first_out[u];
        }
      }
    }

    /// Execute the algorithm performing augment and relabel operations
    void startAugment(int max_length) {
      // Parameters for heuristics
      const int EARLY_TERM_EPSILON_LIMIT = 1000;
-       const double GLOBAL_UPDATE_FACTOR = 3.0;
-
-       const int global_update_freq = int(GLOBAL_UPDATE_FACTOR *
+       const double GLOBAL_UPDATE_FACTOR = 1.0;
+       const int global_update_skip = static_cast<int>(GLOBAL_UPDATE_FACTOR *
        (_res_node_num + _sup_node_num * _sup_node_num));
-       int next_update_limit = global_update_freq;
-
-       int relabel_cnt = 0;
+       int next_global_update_limit = global_update_skip;

      // Perform cost scaling phases
-       IntVector path;
+       IntVector path;
+       BoolVector path_arc(_res_arc_num, false);
+       int relabel_cnt = 0;
      for ( ; _epsilon >= 1; _epsilon = _epsilon < _alpha && _epsilon > 1 ?
                                        1 : _epsilon / _alpha )
      {
        // Early termination heuristic
        if (_epsilon <= EARLY_TERM_EPSILON_LIMIT) {
          if (earlyTermination()) break;
        }

        // Initialize current phase
        initPhase();

        // Perform partial augment and relabel operations
        while (true) {
          // Select an active node (FIFO selection)
          while (_active_nodes.size() > 0 &&
                 _excess[_active_nodes.front()] <= 0) {
            _active_nodes.pop_front();
          }
          if (_active_nodes.size() == 0) break;
          int start = _active_nodes.front();

          // Find an augmenting path from the start node
-           path.clear();
          int tip = start;
-           while (_excess[tip] >= 0 && int(path.size()) < max_length) {
+           while (int(path.size()) < max_length && _excess[tip] >= 0) {
            int u;
-             LargeCost min_red_cost, rc, pi_tip = _pi[tip];
+             LargeCost rc, min_red_cost = std::numeric_limits<LargeCost>::max();
+             LargeCost pi_tip = _pi[tip];
            int last_out = _first_out[tip+1];
            for (int a = _next_out[tip]; a != last_out; ++a) {
-               u = _target[a];
-               if (_res_cap[a] > 0 && _cost[a] + pi_tip - _pi[u] < 0) {
-                 path.push_back(a);
-                 _next_out[tip] = a;
-                 tip = u;
-                 goto next_step;
+               if (_res_cap[a] > 0) {
+                 u = _target[a];
+                 rc = _cost[a] + pi_tip - _pi[u];
+                 if (rc < 0) {
+                   path.push_back(a);
+                   _next_out[tip] = a;
+                   if (path_arc[a]) {
+                     goto augment;   // a cycle is found, stop path search
+                   }
+                   tip = u;
+                   path_arc[a] = true;
+                   goto next_step;
+                 }
+                 else if (rc < min_red_cost) {
+                   min_red_cost = rc;
+                 }
              }
            }

            // Relabel tip node
-             min_red_cost = std::numeric_limits<LargeCost>::max();
            if (tip != start) {
              int ra = _reverse[path.back()];
-               min_red_cost = _cost[ra] + pi_tip - _pi[_target[ra]];
+               min_red_cost =
+                 std::min(min_red_cost, _cost[ra] + pi_tip - _pi[_target[ra]]);
            }
+             last_out = _next_out[tip];
            for (int a = _first_out[tip]; a != last_out; ++a) {
-               rc = _cost[a] + pi_tip - _pi[_target[a]];
-               if (_res_cap[a] > 0 && rc < min_red_cost) {
-                 min_red_cost = rc;
+               if (_res_cap[a] > 0) {
+                 rc = _cost[a] + pi_tip - _pi[_target[a]];
+                 if (rc < min_red_cost) {
+                   min_red_cost = rc;
+                 }
              }
            }
            _pi[tip] -= min_red_cost + _epsilon;
            _next_out[tip] = _first_out[tip];
            ++relabel_cnt;

            // Step back
            if (tip != start) {
-               tip = _source[path.back()];
+               int pa = path.back();
+               path_arc[pa] = false;
+               tip = _source[pa];
              path.pop_back();
            }

      next_step: ;
          }

          // Augment along the found path (as much flow as possible)
+         augment:
          Value delta;
          int pa, u, v = start;
          for (int i = 0; i != int(path.size()); ++i) {
            pa = path[i];
            u = v;
            v = _target[pa];
+             path_arc[pa] = false;
            delta = std::min(_res_cap[pa], _excess[u]);
            _res_cap[pa] -= delta;
            _res_cap[_reverse[pa]] += delta;
            _excess[u] -= delta;
            _excess[v] += delta;
-             if (_excess[v] > 0 && _excess[v] <= delta)
+             if (_excess[v] > 0 && _excess[v] <= delta) {
              _active_nodes.push_back(v);
+             }
          }
+           path.clear();

          // Global update heuristic
-           if (relabel_cnt >= next_update_limit) {
+           if (relabel_cnt >= next_global_update_limit) {
            globalUpdate();
-             next_update_limit += global_update_freq;
+             next_global_update_limit += global_update_skip;
          }
        }
+
      }
+
    }

    /// Execute the algorithm performing push and relabel operations
    void startPush() {
      // Parameters for heuristics
      const int EARLY_TERM_EPSILON_LIMIT = 1000;
      const double GLOBAL_UPDATE_FACTOR = 2.0;

-       const int global_update_freq = int(GLOBAL_UPDATE_FACTOR *
+       const int global_update_skip = static_cast<int>(GLOBAL_UPDATE_FACTOR *
        (_res_node_num + _sup_node_num * _sup_node_num));
-       int next_update_limit = global_update_freq;
-
-       int relabel_cnt = 0;
+       int next_global_update_limit = global_update_skip;

      // Perform cost scaling phases
      BoolVector hyper(_res_node_num, false);
      LargeCostVector hyper_cost(_res_node_num);
+       int relabel_cnt = 0;
      for ( ; _epsilon >= 1; _epsilon = _epsilon < _alpha && _epsilon > 1 ?
                                        1 : _epsilon / _alpha )
      {
        // Early termination heuristic
        if (_epsilon <= EARLY_TERM_EPSILON_LIMIT) {
          if (earlyTermination()) break;
        }

        // Initialize current phase
        initPhase();

        // Perform push and relabel operations
        while (_active_nodes.size() > 0) {
          LargeCost min_red_cost, rc, pi_n;
          Value delta;
          int n, t, a, last_out = _res_arc_num;

        next_node:
          // Select an active node (FIFO selection)
          n = _active_nodes.front();
          last_out = _first_out[n+1];
          pi_n = _pi[n];

          // Perform push operations if there are admissible arcs
          if (_excess[n] > 0) {
            for (a = _next_out[n]; a != last_out; ++a) {
              if (_res_cap[a] > 0 &&
                  _cost[a] + pi_n - _pi[_target[a]] < 0) {
                delta = std::min(_res_cap[a], _excess[n]);
                t = _target[a];

                // Push-look-ahead heuristic
                Value ahead = -_excess[t];
                int last_out_t = _first_out[t+1];
                LargeCost pi_t = _pi[t];
                for (int ta = _next_out[t]; ta != last_out_t; ++ta) {
                  if (_res_cap[ta] > 0 &&
                      _cost[ta] + pi_t - _pi[_target[ta]] < 0)
                    ahead += _res_cap[ta];
                  if (ahead >= delta) break;
                }
                if (ahead < 0) ahead = 0;

                // Push flow along the arc
                if (ahead < delta && !hyper[t]) {
                  _res_cap[a] -= ahead;
                  _res_cap[_reverse[a]] += ahead;
                  _excess[n] -= ahead;
                  _excess[t] += ahead;
                  _active_nodes.push_front(t);
                  hyper[t] = true;
                  hyper_cost[t] = _cost[a] + pi_n - pi_t;
                  _next_out[n] = a;
                  goto next_node;
                } else {
                  _res_cap[a] -= delta;
                  _res_cap[_reverse[a]] += delta;
                  _excess[n] -= delta;
                  _excess[t] += delta;
                  if (_excess[t] > 0 && _excess[t] <= delta)
                    _active_nodes.push_back(t);
                }

                if (_excess[n] == 0) {
                  _next_out[n] = a;
                  goto remove_nodes;
                }
              }
            }
            _next_out[n] = a;
          }

          // Relabel the node if it is still active (or hyper)
          if (_excess[n] > 0 || hyper[n]) {
            min_red_cost = hyper[n] ? -hyper_cost[n] :
              std::numeric_limits<LargeCost>::max();
            for (int a = _first_out[n]; a != last_out; ++a) {
-               rc = _cost[a] + pi_n - _pi[_target[a]];
-               if (_res_cap[a] > 0 && rc < min_red_cost) {
-                 min_red_cost = rc;
+               if (_res_cap[a] > 0) {
+                 rc = _cost[a] + pi_n - _pi[_target[a]];
+                 if (rc < min_red_cost) {
+                   min_red_cost = rc;
+                 }
              }
            }
            _pi[n] -= min_red_cost + _epsilon;
            _next_out[n] = _first_out[n];
            hyper[n] = false;
            ++relabel_cnt;
          }

          // Remove nodes that are not active nor hyper
        remove_nodes:
          while ( _active_nodes.size() > 0 &&
                  _excess[_active_nodes.front()] <= 0 &&
                  !hyper[_active_nodes.front()] ) {
            _active_nodes.pop_front();
          }

          // Global update heuristic
-           if (relabel_cnt >= next_update_limit) {
+           if (relabel_cnt >= next_global_update_limit) {
            globalUpdate();
            for (int u = 0; u != _res_node_num; ++u)
              hyper[u] = false;
-             next_update_limit += global_update_freq;
+             next_global_update_limit += global_update_skip;
          }
        }
      }
    }

  }; //class CostScaling

  ///@}

} //namespace lemon

#endif //LEMON_COST_SCALING_H