# HG changeset patch
# User Alpar Juttner
# Date 1289889961 -3600
# Node ID 4980b05606bdffb4ca3c522a9449dd5b1f5310d4
# Parent 70bee017b584c6c4e3dd4013e37a4d875dbbe0f0
# Parent 234d635ad72194ebd6145ef78355dc8cc3dd369b
Merge

diff -r 70bee017b584 -r 4980b05606bd CMakeLists.txt
--- a/CMakeLists.txt Mon Nov 15 22:23:35 2010 +0100
+++ b/CMakeLists.txt Tue Nov 16 07:46:01 2010 +0100
@@ -4,6 +4,7 @@
 PROJECT(${PROJECT_NAME})

 INCLUDE(FindPythonInterp)
+INCLUDE(FindWget)

 IF(EXISTS ${PROJECT_SOURCE_DIR}/cmake/version.cmake)
   INCLUDE(${PROJECT_SOURCE_DIR}/cmake/version.cmake)
diff -r 70bee017b584 -r 4980b05606bd doc/CMakeLists.txt
--- a/doc/CMakeLists.txt Mon Nov 15 22:23:35 2010 +0100
+++ b/doc/CMakeLists.txt Tue Nov 16 07:46:01 2010 +0100
@@ -3,6 +3,8 @@
 SET(abs_top_srcdir ${PROJECT_SOURCE_DIR})
 SET(abs_top_builddir ${PROJECT_BINARY_DIR})

+SET(LEMON_DOC_SOURCE_BROWSER "NO" CACHE STRING "Include source into the doc (YES/NO).")
+
 CONFIGURE_FILE(
   ${PROJECT_SOURCE_DIR}/doc/Doxyfile.in
   ${PROJECT_BINARY_DIR}/doc/Doxyfile
@@ -52,3 +54,15 @@
   ENDIF()
 ENDIF()
+
+IF(WGET_FOUND)
+ADD_CUSTOM_TARGET(update-external-tags
+  COMMAND ${CMAKE_COMMAND} -E make_directory dl
+  # COMMAND ${CMAKE_COMMAND} -E copy libstdc++.tag dl
+  COMMAND ${WGET_EXECUTABLE} -P dl -N http://gcc.gnu.org/onlinedocs/libstdc++/latest-doxygen/libstdc++.tag
+  COMMAND ${CMAKE_COMMAND} -E rename dl/libstdc++.tag libstdc++.tag
+  COMMAND ${CMAKE_COMMAND} -E remove dl/libstdc++.tag
+  COMMAND ${CMAKE_COMMAND} -E remove_directory dl
+  WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
+  )
+ENDIF()
diff -r 70bee017b584 -r 4980b05606bd doc/Doxyfile.in
--- a/doc/Doxyfile.in Mon Nov 15 22:23:35 2010 +0100
+++ b/doc/Doxyfile.in Tue Nov 16 07:46:01 2010 +0100
@@ -70,7 +70,7 @@
 SHOW_FILES = YES
 SHOW_NAMESPACES = YES
 FILE_VERSION_FILTER =
-LAYOUT_FILE = DoxygenLayout.xml
+LAYOUT_FILE = "@abs_top_srcdir@/doc/DoxygenLayout.xml"
 #---------------------------------------------------------------------------
 # configuration options related to warning and progress messages
 #---------------------------------------------------------------------------
@@ -114,7 +114,7 @@
 #---------------------------------------------------------------------------
 # configuration options related to source browsing
 #---------------------------------------------------------------------------
-SOURCE_BROWSER = NO
+SOURCE_BROWSER = @LEMON_DOC_SOURCE_BROWSER@
 INLINE_SOURCES = NO
 STRIP_CODE_COMMENTS = YES
 REFERENCED_BY_RELATION = NO
@@ -225,7 +225,7 @@
 #---------------------------------------------------------------------------
 # Options related to the search engine
 #---------------------------------------------------------------------------
-TAGFILES = "@abs_top_srcdir@/doc/libstdc++.tag = http://gcc.gnu.org/onlinedocs/libstdc++/latest-doxygen/ "
+TAGFILES = "@abs_top_builddir@/doc/libstdc++.tag = http://gcc.gnu.org/onlinedocs/libstdc++/latest-doxygen/ "
 GENERATE_TAGFILE = html/lemon.tag
 ALLEXTERNALS = NO
 EXTERNAL_GROUPS = NO
diff -r 70bee017b584 -r 4980b05606bd lemon/CMakeLists.txt
--- a/lemon/CMakeLists.txt Mon Nov 15 22:23:35 2010 +0100
+++ b/lemon/CMakeLists.txt Tue Nov 16 07:46:01 2010 +0100
@@ -8,6 +8,12 @@
   ${CMAKE_CURRENT_BINARY_DIR}/config.h
 )

+CONFIGURE_FILE(
+  ${CMAKE_CURRENT_SOURCE_DIR}/lemon.pc.cmake
+  ${CMAKE_CURRENT_BINARY_DIR}/lemon.pc
+  @ONLY
+)
+
 SET(LEMON_SOURCES
   arg_parser.cc
   base.cc
@@ -66,3 +72,9 @@
   DESTINATION include/lemon
   COMPONENT headers
 )
+
+INSTALL(
+  FILES ${CMAKE_CURRENT_BINARY_DIR}/lemon.pc
+  DESTINATION lib/pkgconfig
+)
+
diff -r 70bee017b584 -r 4980b05606bd lemon/Makefile.am
--- a/lemon/Makefile.am Mon Nov 15 22:23:35 2010 +0100
+++ b/lemon/Makefile.am Tue Nov 16 07:46:01 2010 +0100
@@ -108,6 +108,7 @@
 	lemon/math.h \
 	lemon/min_cost_arborescence.h \
 	lemon/max_cardinality_search.h \
+	lemon/nagamochi_ibaraki.h \
 	lemon/nauty_reader.h \
 	lemon/network_simplex.h \
 	lemon/pairing_heap.h \
diff -r 70bee017b584 -r 4980b05606bd lemon/hao_orlin.h
--- a/lemon/hao_orlin.h Mon Nov 15 22:23:35 2010 +0100
+++ b/lemon/hao_orlin.h Tue Nov 16 07:46:01 2010 +0100
@@ -53,8 +53,8 @@
   /// minimum cut of \f$ D \f$. The algorithm is a modified
   /// preflow push-relabel algorithm. Our implementation calculates
   /// the minimum cut in \f$ O(n^2\sqrt{m}) \f$ time (we use the
-  /// highest-label rule), or in \f$O(nm)\f$ for unit capacities. The
-  /// purpose of such algorithm is e.g. testing network reliability.
+  /// highest-label rule), or in \f$O(nm)\f$ for unit capacities. A notable
+  /// use of this algorithm is testing network reliability.
   ///
   /// For an undirected graph you can run just the first phase of the
   /// algorithm or you can use the algorithm of Nagamochi and Ibaraki,
@@ -912,6 +912,8 @@
     /// This function calculates a minimum cut with \f$ source \f$ on the
     /// source-side (i.e. a set \f$ X\subsetneq V \f$ with
     /// \f$ source \in X \f$ and minimal outgoing capacity).
+    /// It updates the stored cut if (and only if) the newly found one
+    /// is better.
     ///
     /// \pre \ref init() must be called before using this function.
     void calculateOut() {
@@ -924,6 +926,8 @@
     /// This function calculates a minimum cut with \f$ source \f$ on the
     /// sink-side (i.e. a set \f$ X\subsetneq V \f$ with
     /// \f$ source \notin X \f$ and minimal outgoing capacity).
+    /// It updates the stored cut if (and only if) the newly found one
+    /// is better.
     ///
     /// \pre \ref init() must be called before using this function.
     void calculateIn() {
@@ -933,8 +937,8 @@
     /// \brief Run the algorithm.
     ///
-    /// This function runs the algorithm. It finds nodes \c source and
-    /// \c target arbitrarily and then calls \ref init(), \ref calculateOut()
+    /// This function runs the algorithm. It chooses a source node,
+    /// then calls \ref init(), \ref calculateOut()
     /// and \ref calculateIn().
     void run() {
       init();
@@ -944,9 +948,9 @@
     /// \brief Run the algorithm.
     ///
-    /// This function runs the algorithm. It uses the given \c source node,
-    /// finds a proper \c target node and then calls the \ref init(),
-    /// \ref calculateOut() and \ref calculateIn().
+    /// This function runs the algorithm. It calls \ref init(),
+    /// \ref calculateOut() and \ref calculateIn() with the given
+    /// source node.
     void run(const Node& s) {
       init(s);
       calculateOut();
@@ -965,7 +969,9 @@
     /// \brief Return the value of the minimum cut.
     ///
-    /// This function returns the value of the minimum cut.
+    /// This function returns the value of the best cut found by the
+    /// previously called \ref run(), \ref calculateOut() or \ref
+    /// calculateIn().
     ///
     /// \pre \ref run(), \ref calculateOut() or \ref calculateIn()
     /// must be called before using this function.
@@ -976,9 +982,13 @@
     /// \brief Return a minimum cut.
     ///
-    /// This function sets \c cutMap to the characteristic vector of a
-    /// minimum value cut: it will give a non-empty set \f$ X\subsetneq V \f$
-    /// with minimal outgoing capacity (i.e. \c cutMap will be \c true exactly
+    /// This function gives the best cut found by the
+    /// previously called \ref run(), \ref calculateOut() or \ref
+    /// calculateIn().
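+    ///
+    /// A minimal usage sketch (\c g and \c cap are assumed to be the
+    /// input digraph and its arc capacity map; the names are only
+    /// placeholders):
+    /// \code
+    ///   HaoOrlin<ListDigraph, ListDigraph::ArcMap<int> > ho(g, cap);
+    ///   ho.run();
+    ///   ListDigraph::NodeMap<bool> cut(g);
+    ///   ho.minCutMap(cut);
+    /// \endcode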
+    ///
+    /// It sets \c cutMap to the characteristic vector of the found
+    /// minimum value cut - a non-empty set \f$ X\subsetneq V \f$
+    /// of minimum outgoing capacity (i.e. \c cutMap will be \c true exactly
     /// for the nodes of \f$ X \f$).
     ///
     /// \param cutMap A \ref concepts::WriteMap "writable" node map with
diff -r 70bee017b584 -r 4980b05606bd lemon/lemon.pc.cmake
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lemon/lemon.pc.cmake Tue Nov 16 07:46:01 2010 +0100
@@ -0,0 +1,10 @@
+prefix=@CMAKE_INSTALL_PREFIX@
+exec_prefix=@CMAKE_INSTALL_PREFIX@/bin
+libdir=@CMAKE_INSTALL_PREFIX@/lib
+includedir=@CMAKE_INSTALL_PREFIX@/include
+
+Name: @PROJECT_NAME@
+Description: Library for Efficient Modeling and Optimization in Networks
+Version: @PROJECT_VERSION@
+Libs: -L${libdir} -lemon @GLPK_LIBS@ @CPLEX_LIBS@ @SOPLEX_LIBS@ @CLP_LIBS@ @CBC_LIBS@
+Cflags: -I${includedir}
diff -r 70bee017b584 -r 4980b05606bd lemon/nagamochi_ibaraki.h
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lemon/nagamochi_ibaraki.h Tue Nov 16 07:46:01 2010 +0100
@@ -0,0 +1,697 @@
+/* -*- mode: C++; indent-tabs-mode: nil; -*-
+ *
+ * This file is a part of LEMON, a generic C++ optimization library.
+ *
+ * Copyright (C) 2003-2010
+ * Egervary Jeno Kombinatorikus Optimalizalasi Kutatocsoport
+ * (Egervary Research Group on Combinatorial Optimization, EGRES).
+ *
+ * Permission to use, modify and distribute this software is granted
+ * provided that this copyright notice appears in all copies. For
+ * precise terms see the accompanying LICENSE file.
+ *
+ * This software is provided "AS IS" with no warranty of any kind,
+ * express or implied, and with no claim as to its suitability for any
+ * purpose.
+ *
+ */
+
+#ifndef LEMON_NAGAMOCHI_IBARAKI_H
+#define LEMON_NAGAMOCHI_IBARAKI_H
+
+
+/// \ingroup min_cut
+/// \file
+/// \brief Implementation of the Nagamochi-Ibaraki algorithm.
+
+#include
+#include
+#include
+#include
+#include
+#include
+
+#include
+
+namespace lemon {
+
+  /// \brief Default traits class for NagamochiIbaraki class.
+  ///
+  /// Default traits class for NagamochiIbaraki class.
+  /// \param GR The undirected graph type.
+  /// \param CM Type of capacity map.
+  template <typename GR, typename CM>
+  struct NagamochiIbarakiDefaultTraits {
+    /// The type of the capacity map.
+    typedef typename CM::Value Value;
+
+    /// The undirected graph type the algorithm runs on.
+    typedef GR Graph;
+
+    /// \brief The type of the map that stores the edge capacities.
+    ///
+    /// The type of the map that stores the edge capacities.
+    /// It must meet the \ref concepts::ReadMap "ReadMap" concept.
+    typedef CM CapacityMap;
+
+    /// \brief Instantiates a CapacityMap.
+    ///
+    /// This function instantiates a \ref CapacityMap.
+#ifdef DOXYGEN
+    static CapacityMap *createCapacityMap(const Graph& graph)
+#else
+    static CapacityMap *createCapacityMap(const Graph&)
+#endif
+    {
+      LEMON_ASSERT(false, "CapacityMap is not initialized");
+      return 0; // ignore warnings
+    }
+
+    /// \brief The cross reference type used by heap.
+    ///
+    /// The cross reference type used by the heap.
+    /// Usually \c Graph::NodeMap<int>.
+    typedef typename Graph::template NodeMap<int> HeapCrossRef;
+
+    /// \brief Instantiates a HeapCrossRef.
+    ///
+    /// This function instantiates a \ref HeapCrossRef.
+    /// \param g is the graph for which we would like to define the
+    /// \ref HeapCrossRef.
+    static HeapCrossRef *createHeapCrossRef(const Graph& g) {
+      return new HeapCrossRef(g);
+    }
+
+    /// \brief The heap type used by the NagamochiIbaraki algorithm.
+    ///
+    /// The heap type used by the NagamochiIbaraki algorithm. It has to
+    /// maximize the priorities.
+    ///
+    /// \sa BinHeap
+    /// \sa NagamochiIbaraki
+    typedef BinHeap<Value, HeapCrossRef, std::greater<Value> > Heap;
+
+    /// \brief Instantiates a Heap.
+    ///
+    /// This function instantiates a \ref Heap.
+    /// \param r is the cross reference of the heap.
+    static Heap *createHeap(HeapCrossRef& r) {
+      return new Heap(r);
+    }
+  };
+
+  /// \ingroup min_cut
+  ///
+  /// \brief Calculates the minimum cut in an undirected graph.
+  ///
+  /// Calculates the minimum cut in an undirected graph with the
+  /// Nagamochi-Ibaraki algorithm. The algorithm separates the graph's
+  /// nodes into two partitions with the minimum sum of edge capacities
+  /// between the two partitions. The algorithm can be used to test
+  /// network reliability, especially to test how many links have
+  /// to be destroyed in the network to split it into at least two
+  /// distinct subnetworks.
+  ///
+  /// The complexity of the algorithm is \f$ O(nm\log(n)) \f$, but with a
+  /// \ref FibHeap "Fibonacci heap" it can be decreased to
+  /// \f$ O(nm+n^2\log(n)) \f$. When the edges have unit capacities,
+  /// \c BucketHeap can be used, which yields \f$ O(nm) \f$ time
+  /// complexity.
+  ///
+  /// \warning The value type of the capacity map should be able to
+  /// hold any cut value of the graph, otherwise the result can
+  /// overflow.
+  /// \note The capacity values are supposed to be of an integer type.
+#ifdef DOXYGEN
+  template <typename GR, typename CM, typename TR>
+#else
+  template <typename GR, typename CM = typename GR::template EdgeMap<int>,
+            typename TR = NagamochiIbarakiDefaultTraits<GR, CM> >
+#endif
+  class NagamochiIbaraki {
+  public:
+
+    typedef TR Traits;
+    /// The type of the underlying graph.
+    typedef typename Traits::Graph Graph;
+
+    /// The type of the capacity map.
+    typedef typename Traits::CapacityMap CapacityMap;
+    /// The value type of the capacity map.
+    typedef typename Traits::CapacityMap::Value Value;
+
+    /// The heap type used by the algorithm.
+    typedef typename Traits::Heap Heap;
+    /// The cross reference type used for the heap.
+    typedef typename Traits::HeapCrossRef HeapCrossRef;
+
+    ///\name Named template parameters
+
+    ///@{
+
+    struct SetUnitCapacityTraits : public Traits {
+      typedef ConstMap<typename Graph::Edge, Const<int, 1> > CapacityMap;
+      static CapacityMap *createCapacityMap(const Graph&) {
+        return new CapacityMap();
+      }
+    };
+
+    /// \brief \ref named-templ-param "Named parameter" for setting
+    /// the capacity map to a constMap() instance
+    ///
+    /// \ref named-templ-param "Named parameter" for setting
+    /// the capacity map to a constMap() instance
+    struct SetUnitCapacity
+      : public NagamochiIbaraki<Graph, CapacityMap, SetUnitCapacityTraits> {
+      typedef NagamochiIbaraki<Graph, CapacityMap, SetUnitCapacityTraits> Create;
+    };
+
+
+    template <typename H, typename CR>
+    struct SetHeapTraits : public Traits {
+      typedef CR HeapCrossRef;
+      typedef H Heap;
+      static HeapCrossRef *createHeapCrossRef(int num) {
+        LEMON_ASSERT(false, "HeapCrossRef is not initialized");
+        return 0; // ignore warnings
+      }
+      static Heap *createHeap(HeapCrossRef &) {
+        LEMON_ASSERT(false, "Heap is not initialized");
+        return 0; // ignore warnings
+      }
+    };
+
+    /// \brief \ref named-templ-param "Named parameter" for setting
+    /// the heap and cross reference type
+    ///
+    /// \ref named-templ-param "Named parameter" for setting the heap and
+    /// cross reference type. The heap has to maximize the priorities.
+    template <typename H, typename CR = typename Graph::template NodeMap<int> >
+    struct SetHeap
+      : public NagamochiIbaraki<Graph, CapacityMap, SetHeapTraits<H, CR> > {
+      typedef NagamochiIbaraki< Graph, CapacityMap, SetHeapTraits<H, CR> >
+        Create;
+    };
+
+    template <typename H, typename CR>
+    struct SetStandardHeapTraits : public Traits {
+      typedef CR HeapCrossRef;
+      typedef H Heap;
+      static HeapCrossRef *createHeapCrossRef(int size) {
+        return new HeapCrossRef(size);
+      }
+      static Heap *createHeap(HeapCrossRef &crossref) {
+        return new Heap(crossref);
+      }
+    };
+
+    /// \brief \ref named-templ-param "Named parameter" for setting
+    /// the heap and cross reference type with automatic allocation
+    ///
+    /// \ref named-templ-param "Named parameter" for setting the heap and
+    /// cross reference type with automatic allocation. They should
+    /// have standard constructor interfaces so that they can be
+    /// created automatically by the algorithm (i.e. the graph should
+    /// be passed to the constructor of the cross reference and the
+    /// cross reference should be passed to the constructor of the
+    /// heap). However, external heap and cross reference objects
+    /// could also be passed to the algorithm using the \ref heap()
+    /// function before calling \ref run() or \ref init(). The heap
+    /// has to maximize the priorities.
+    /// \sa SetHeap
+    template <typename H, typename CR = typename Graph::template NodeMap<int> >
+    struct SetStandardHeap
+      : public NagamochiIbaraki<Graph, CapacityMap, SetStandardHeapTraits<H, CR> > {
+      typedef NagamochiIbaraki<Graph, CapacityMap, SetStandardHeapTraits<H, CR> > Create;
+    };
+
+    ///@}
+
+
+  private:
+
+    const Graph &_graph;
+    const CapacityMap *_capacity;
+    bool _local_capacity; // unit capacity
+
+    struct ArcData {
+      typename Graph::Node target;
+      int prev, next;
+    };
+    struct EdgeData {
+      Value capacity;
+      Value cut;
+    };
+
+    struct NodeData {
+      int first_arc;
+      typename Graph::Node prev, next;
+      int curr_arc;
+      typename Graph::Node last_rep;
+      Value sum;
+    };
+
+    typename Graph::template NodeMap<NodeData> *_nodes;
+    std::vector<ArcData> _arcs;
+    std::vector<EdgeData> _edges;
+
+    typename Graph::Node _first_node;
+    int _node_num;
+
+    Value _min_cut;
+
+    HeapCrossRef *_heap_cross_ref;
+    bool _local_heap_cross_ref;
+    Heap *_heap;
+    bool _local_heap;
+
+    typedef typename Graph::template NodeMap<typename Graph::Node> NodeList;
+    NodeList *_next_rep;
+
+    typedef typename Graph::template NodeMap<bool> MinCutMap;
+    MinCutMap *_cut_map;
+
+    void createStructures() {
+      if (!_nodes) {
+        _nodes = new (typename Graph::template NodeMap<NodeData>)(_graph);
+      }
+      if (!_capacity) {
+        _local_capacity = true;
+        _capacity = Traits::createCapacityMap(_graph);
+      }
+      if (!_heap_cross_ref) {
+        _local_heap_cross_ref = true;
+        _heap_cross_ref = Traits::createHeapCrossRef(_graph);
+      }
+      if (!_heap) {
+        _local_heap = true;
+        _heap = Traits::createHeap(*_heap_cross_ref);
+      }
+      if (!_next_rep) {
+        _next_rep = new NodeList(_graph);
+      }
+      if (!_cut_map) {
+        _cut_map = new MinCutMap(_graph);
+      }
+    }
+
+  public :
+
+    typedef NagamochiIbaraki Create;
+
+
+    /// \brief Constructor.
+    ///
+    /// \param graph The graph the algorithm runs on.
+    /// \param capacity The capacity map used by the algorithm.
+    NagamochiIbaraki(const Graph& graph, const CapacityMap& capacity)
+      : _graph(graph), _capacity(&capacity), _local_capacity(false),
+        _nodes(0), _arcs(), _edges(), _min_cut(),
+        _heap_cross_ref(0), _local_heap_cross_ref(false),
+        _heap(0), _local_heap(false),
+        _next_rep(0), _cut_map(0) {}
+
+    /// \brief Constructor.
+    ///
+    /// This constructor can be used only when the Traits class
+    /// defines how the local capacity map can be instantiated.
+    /// If the \ref SetUnitCapacity named parameter is used, the
+    /// algorithm automatically constructs the capacity map.
+    ///
+    ///\param graph The graph the algorithm runs on.
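+    ///
+    /// For example, with the \ref SetUnitCapacity named parameter the
+    /// algorithm can be created and run as follows (a minimal sketch,
+    /// mirroring the accompanying test; \c g is assumed to be the
+    /// input \c SmartGraph):
+    /// \code
+    ///   NagamochiIbaraki<SmartGraph>::SetUnitCapacity::Create ni(g);
+    ///   ni.run();
+    ///   int value = ni.minCutValue();
+    /// \endcode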
+    NagamochiIbaraki(const Graph& graph)
+      : _graph(graph), _capacity(0), _local_capacity(false),
+        _nodes(0), _arcs(), _edges(), _min_cut(),
+        _heap_cross_ref(0), _local_heap_cross_ref(false),
+        _heap(0), _local_heap(false),
+        _next_rep(0), _cut_map(0) {}
+
+    /// \brief Destructor.
+    ///
+    /// Destructor.
+    ~NagamochiIbaraki() {
+      if (_local_capacity) delete _capacity;
+      if (_nodes) delete _nodes;
+      if (_local_heap) delete _heap;
+      if (_local_heap_cross_ref) delete _heap_cross_ref;
+      if (_next_rep) delete _next_rep;
+      if (_cut_map) delete _cut_map;
+    }
+
+    /// \brief Sets the heap and the cross reference used by the algorithm.
+    ///
+    /// Sets the heap and the cross reference used by the algorithm.
+    /// If you don't use this function before calling \ref run(),
+    /// they will be allocated automatically. The destructor deallocates
+    /// this automatically allocated heap and cross reference.
+    /// \return (*this)
+    NagamochiIbaraki &heap(Heap& hp, HeapCrossRef &cr)
+    {
+      if (_local_heap_cross_ref) {
+        delete _heap_cross_ref;
+        _local_heap_cross_ref = false;
+      }
+      _heap_cross_ref = &cr;
+      if (_local_heap) {
+        delete _heap;
+        _local_heap = false;
+      }
+      _heap = &hp;
+      return *this;
+    }
+
+    /// \name Execution control
+    /// The simplest way to execute the algorithm is to use
+    /// one of the member functions called \c run().
+    /// \n
+    /// If you need more control on the execution,
+    /// you must call \ref init() first, and then call either \ref start()
+    /// or \ref processNextPhase() the proper number of times.
+
+    ///@{
+
+    /// \brief Initializes the internal data structures.
+    ///
+    /// Initializes the internal data structures.
+    void init() {
+      createStructures();
+
+      int edge_num = countEdges(_graph);
+      _edges.resize(edge_num);
+      _arcs.resize(2 * edge_num);
+
+      typename Graph::Node prev = INVALID;
+      _node_num = 0;
+      for (typename Graph::NodeIt n(_graph); n != INVALID; ++n) {
+        (*_cut_map)[n] = false;
+        (*_next_rep)[n] = INVALID;
+        (*_nodes)[n].last_rep = n;
+        (*_nodes)[n].first_arc = -1;
+        (*_nodes)[n].curr_arc = -1;
+        (*_nodes)[n].prev = prev;
+        if (prev != INVALID) {
+          (*_nodes)[prev].next = n;
+        }
+        (*_nodes)[n].next = INVALID;
+        (*_nodes)[n].sum = 0;
+        prev = n;
+        ++_node_num;
+      }
+
+      _first_node = typename Graph::NodeIt(_graph);
+
+      int index = 0;
+      for (typename Graph::NodeIt n(_graph); n != INVALID; ++n) {
+        for (typename Graph::OutArcIt a(_graph, n); a != INVALID; ++a) {
+          typename Graph::Node m = _graph.target(a);
+
+          if (!(n < m)) continue;
+
+          (*_nodes)[n].sum += (*_capacity)[a];
+          (*_nodes)[m].sum += (*_capacity)[a];
+
+          int c = (*_nodes)[m].curr_arc;
+          if (c != -1 && _arcs[c ^ 1].target == n) {
+            _edges[c >> 1].capacity += (*_capacity)[a];
+          } else {
+            _edges[index].capacity = (*_capacity)[a];
+
+            _arcs[index << 1].prev = -1;
+            if ((*_nodes)[n].first_arc != -1) {
+              _arcs[(*_nodes)[n].first_arc].prev = (index << 1);
+            }
+            _arcs[index << 1].next = (*_nodes)[n].first_arc;
+            (*_nodes)[n].first_arc = (index << 1);
+            _arcs[index << 1].target = m;
+
+            (*_nodes)[m].curr_arc = (index << 1);
+
+            _arcs[(index << 1) | 1].prev = -1;
+            if ((*_nodes)[m].first_arc != -1) {
+              _arcs[(*_nodes)[m].first_arc].prev = ((index << 1) | 1);
+            }
+            _arcs[(index << 1) | 1].next = (*_nodes)[m].first_arc;
+            (*_nodes)[m].first_arc = ((index << 1) | 1);
+            _arcs[(index << 1) | 1].target = n;
+
+            ++index;
+          }
+        }
+      }
+
+      typename Graph::Node cut_node = INVALID;
+      _min_cut = std::numeric_limits<Value>::max();
+
+      for (typename Graph::Node n = _first_node;
+           n != INVALID; n = (*_nodes)[n].next) {
+        if ((*_nodes)[n].sum < _min_cut) {
+          cut_node = n;
+          _min_cut = (*_nodes)[n].sum;
+        }
+      }
+      (*_cut_map)[cut_node] = true;
+      if (_min_cut == 0) {
+        _first_node = INVALID;
+      }
+    }
+
+  public:
+
+    /// \brief Processes the next phase
+    ///
+    /// Processes the next phase in the algorithm. It must be called
+    /// at most one less than the number of the nodes in the graph.
+    ///
+    ///\return %True when the algorithm finished.
+    bool processNextPhase() {
+      if (_first_node == INVALID) return true;
+
+      _heap->clear();
+      for (typename Graph::Node n = _first_node;
+           n != INVALID; n = (*_nodes)[n].next) {
+        (*_heap_cross_ref)[n] = Heap::PRE_HEAP;
+      }
+
+      std::vector<typename Graph::Node> order;
+      order.reserve(_node_num);
+      int sep = 0;
+
+      Value alpha = 0;
+      Value pmc = std::numeric_limits<Value>::max();
+
+      _heap->push(_first_node, static_cast<Value>(0));
+      while (!_heap->empty()) {
+        typename Graph::Node n = _heap->top();
+        Value v = _heap->prio();
+
+        _heap->pop();
+        for (int a = (*_nodes)[n].first_arc; a != -1; a = _arcs[a].next) {
+          switch (_heap->state(_arcs[a].target)) {
+          case Heap::PRE_HEAP:
+            {
+              Value nv = _edges[a >> 1].capacity;
+              _heap->push(_arcs[a].target, nv);
+              _edges[a >> 1].cut = nv;
+            } break;
+          case Heap::IN_HEAP:
+            {
+              Value nv = _edges[a >> 1].capacity + (*_heap)[_arcs[a].target];
+              _heap->decrease(_arcs[a].target, nv);
+              _edges[a >> 1].cut = nv;
+            } break;
+          case Heap::POST_HEAP:
+            break;
+          }
+        }
+
+        alpha += (*_nodes)[n].sum;
+        alpha -= 2 * v;
+
+        order.push_back(n);
+        if (!_heap->empty()) {
+          if (alpha < pmc) {
+            pmc = alpha;
+            sep = order.size();
+          }
+        }
+      }
+
+      if (static_cast<int>(order.size()) < _node_num) {
+        _first_node = INVALID;
+        for (typename Graph::NodeIt n(_graph); n != INVALID; ++n) {
+          (*_cut_map)[n] = false;
+        }
+        for (int i = 0; i < static_cast<int>(order.size()); ++i) {
+          typename Graph::Node n = order[i];
+          while (n != INVALID) {
+            (*_cut_map)[n] = true;
+            n = (*_next_rep)[n];
+          }
+        }
+        _min_cut = 0;
+        return true;
+      }
+
+      if (pmc < _min_cut) {
+        for (typename Graph::NodeIt n(_graph); n != INVALID; ++n) {
+          (*_cut_map)[n] = false;
+        }
+        for (int i = 0; i < sep; ++i) {
+          typename Graph::Node n = order[i];
+          while (n != INVALID) {
+            (*_cut_map)[n] = true;
+            n = (*_next_rep)[n];
+          }
+        }
+        _min_cut = pmc;
+      }
+
+      for (typename Graph::Node n = _first_node;
+           n != INVALID; n = (*_nodes)[n].next) {
+        bool merged = false;
+        for (int a = (*_nodes)[n].first_arc; a != -1; a = _arcs[a].next) {
+          if (!(_edges[a >> 1].cut < pmc)) {
+            if (!merged) {
+              for (int b = (*_nodes)[n].first_arc; b != -1; b = _arcs[b].next) {
+                (*_nodes)[_arcs[b].target].curr_arc = b;
+              }
+              merged = true;
+            }
+            typename Graph::Node m = _arcs[a].target;
+            int nb = 0;
+            for (int b = (*_nodes)[m].first_arc; b != -1; b = nb) {
+              nb = _arcs[b].next;
+              if ((b ^ a) == 1) continue;
+              typename Graph::Node o = _arcs[b].target;
+              int c = (*_nodes)[o].curr_arc;
+              if (c != -1 && _arcs[c ^ 1].target == n) {
+                _edges[c >> 1].capacity += _edges[b >> 1].capacity;
+                (*_nodes)[n].sum += _edges[b >> 1].capacity;
+                if (_edges[b >> 1].cut < _edges[c >> 1].cut) {
+                  _edges[b >> 1].cut = _edges[c >> 1].cut;
+                }
+                if (_arcs[b ^ 1].prev != -1) {
+                  _arcs[_arcs[b ^ 1].prev].next = _arcs[b ^ 1].next;
+                } else {
+                  (*_nodes)[o].first_arc = _arcs[b ^ 1].next;
+                }
+                if (_arcs[b ^ 1].next != -1) {
+                  _arcs[_arcs[b ^ 1].next].prev = _arcs[b ^ 1].prev;
+                }
+              } else {
+                if (_arcs[a].next != -1) {
+                  _arcs[_arcs[a].next].prev = b;
+                }
+                _arcs[b].next = _arcs[a].next;
+                _arcs[b].prev = a;
+                _arcs[a].next = b;
+                _arcs[b ^ 1].target = n;
+
+                (*_nodes)[n].sum += _edges[b >> 1].capacity;
+                (*_nodes)[o].curr_arc = b;
+              }
+            }
+
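+            // The edge {n,m} is to be contracted: below, arc a is
+            // detached from n's adjacency list, m's representative
+            // list is folded into n's, and m is unlinked from the
+            // chain of live nodes before the node count is decreased.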
+            if (_arcs[a].prev != -1) {
+              _arcs[_arcs[a].prev].next = _arcs[a].next;
+            } else {
+              (*_nodes)[n].first_arc = _arcs[a].next;
+            }
+            if (_arcs[a].next != -1) {
+              _arcs[_arcs[a].next].prev = _arcs[a].prev;
+            }
+
+            (*_nodes)[n].sum -= _edges[a >> 1].capacity;
+            (*_next_rep)[(*_nodes)[n].last_rep] = m;
+            (*_nodes)[n].last_rep = (*_nodes)[m].last_rep;
+
+            if ((*_nodes)[m].prev != INVALID) {
+              (*_nodes)[(*_nodes)[m].prev].next = (*_nodes)[m].next;
+            } else {
+              _first_node = (*_nodes)[m].next;
+            }
+            if ((*_nodes)[m].next != INVALID) {
+              (*_nodes)[(*_nodes)[m].next].prev = (*_nodes)[m].prev;
+            }
+            --_node_num;
+          }
+        }
+      }
+
+      if (_node_num == 1) {
+        _first_node = INVALID;
+        return true;
+      }
+
+      return false;
+    }
+
+    /// \brief Executes the algorithm.
+    ///
+    /// Executes the algorithm.
+    ///
+    /// \pre \ref init() must be called before using this function.
+    void start() {
+      while (!processNextPhase()) {}
+    }
+
+
+    /// \brief Runs the %NagamochiIbaraki algorithm.
+    ///
+    /// This method runs the minimum cut algorithm.
+    ///
+    /// \note mc.run() is just a shortcut of the following code.
+    ///\code
+    /// mc.init();
+    /// mc.start();
+    ///\endcode
+    void run() {
+      init();
+      start();
+    }
+
+    ///@}
+
+    /// \name Query Functions
+    ///
+    /// The result of the %NagamochiIbaraki
+    /// algorithm can be obtained using these functions.\n
+    /// Before the use of these functions, either \ref run() or
+    /// \ref start() must be called.
+
+    ///@{
+
+    /// \brief Returns the min cut value.
+    ///
+    /// Returns the min cut value if the algorithm finished.
+    /// After the first \ref processNextPhase() call it is the value
+    /// of a valid cut in the graph.
+    Value minCutValue() const {
+      return _min_cut;
+    }
+
+    /// \brief Returns a min cut in a NodeMap.
+    ///
+    /// It sets the nodes of one of the two partitions to \c true and
+    /// the other partition to \c false.
+    /// \param cutMap A \ref concepts::WriteMap "writable" node map with
+    /// \c bool (or convertible) value type.
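+    /// \return The value of the found minimum cut.
+    ///
+    /// A minimal usage sketch (\c g and \c cap are assumed to be the
+    /// input graph and its edge capacity map; the names are only
+    /// placeholders):
+    /// \code
+    ///   NagamochiIbaraki<SmartGraph, SmartGraph::EdgeMap<int> > ni(g, cap);
+    ///   ni.run();
+    ///   SmartGraph::NodeMap<bool> cut(g);
+    ///   ni.minCutMap(cut);
+    /// \endcode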
+    template <typename CutMap>
+    Value minCutMap(CutMap& cutMap) const {
+      for (typename Graph::NodeIt n(_graph); n != INVALID; ++n) {
+        cutMap.set(n, (*_cut_map)[n]);
+      }
+      return minCutValue();
+    }
+
+    ///@}
+
+  };
+}
+
+#endif
diff -r 70bee017b584 -r 4980b05606bd test/CMakeLists.txt
--- a/test/CMakeLists.txt Mon Nov 15 22:23:35 2010 +0100
+++ b/test/CMakeLists.txt Tue Nov 16 07:46:01 2010 +0100
@@ -36,6 +36,7 @@
   min_cost_arborescence_test
   min_cost_flow_test
   min_mean_cycle_test
+  nagamochi_ibaraki_test
   path_test
   planarity_test
   preflow_test
@@ -47,7 +48,12 @@
 )

 IF(LEMON_HAVE_LP)
-  ADD_EXECUTABLE(lp_test lp_test.cc)
+  IF(${CMAKE_BUILD_TYPE} STREQUAL "Maintainer")
+    ADD_EXECUTABLE(lp_test lp_test.cc)
+  ELSE()
+    ADD_EXECUTABLE(lp_test EXCLUDE_FROM_ALL lp_test.cc)
+  ENDIF()
+
   SET(LP_TEST_LIBS lemon)

   IF(LEMON_HAVE_GLPK)
@@ -83,7 +89,12 @@
 ENDIF()

 IF(LEMON_HAVE_MIP)
-  ADD_EXECUTABLE(mip_test mip_test.cc)
+  IF(${CMAKE_BUILD_TYPE} STREQUAL "Maintainer")
+    ADD_EXECUTABLE(mip_test mip_test.cc)
+  ELSE()
+    ADD_EXECUTABLE(mip_test EXCLUDE_FROM_ALL mip_test.cc)
+  ENDIF()
+
   SET(MIP_TEST_LIBS lemon)

   IF(LEMON_HAVE_GLPK)
diff -r 70bee017b584 -r 4980b05606bd test/Makefile.am
--- a/test/Makefile.am Mon Nov 15 22:23:35 2010 +0100
+++ b/test/Makefile.am Tue Nov 16 07:46:01 2010 +0100
@@ -38,6 +38,7 @@
 	test/min_cost_arborescence_test \
 	test/min_cost_flow_test \
 	test/min_mean_cycle_test \
+	test/nagamochi_ibaraki_test \
 	test/path_test \
 	test/planarity_test \
 	test/preflow_test \
@@ -91,6 +92,7 @@
 test_min_cost_arborescence_test_SOURCES = test/min_cost_arborescence_test.cc
 test_min_cost_flow_test_SOURCES = test/min_cost_flow_test.cc
 test_min_mean_cycle_test_SOURCES = test/min_mean_cycle_test.cc
+test_nagamochi_ibaraki_test_SOURCES = test/nagamochi_ibaraki_test.cc
 test_path_test_SOURCES = test/path_test.cc
 test_planarity_test_SOURCES = test/planarity_test.cc
 test_preflow_test_SOURCES = test/preflow_test.cc
diff -r 70bee017b584 -r 4980b05606bd test/nagamochi_ibaraki_test.cc
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test/nagamochi_ibaraki_test.cc Tue Nov 16 07:46:01 2010 +0100
@@ -0,0 +1,141 @@
+/* -*- mode: C++; indent-tabs-mode: nil; -*-
+ *
+ * This file is a part of LEMON, a generic C++ optimization library.
+ *
+ * Copyright (C) 2003-2010
+ * Egervary Jeno Kombinatorikus Optimalizalasi Kutatocsoport
+ * (Egervary Research Group on Combinatorial Optimization, EGRES).
+ *
+ * Permission to use, modify and distribute this software is granted
+ * provided that this copyright notice appears in all copies. For
+ * precise terms see the accompanying LICENSE file.
+ *
+ * This software is provided "AS IS" with no warranty of any kind,
+ * express or implied, and with no claim as to its suitability for any
+ * purpose.
+ *
+ */
+
+#include
+
+#include
+#include
+#include
+#include
+#include
+#include
+
+#include "test_tools.h"
+
+using namespace lemon;
+using namespace std;
+
+const std::string lgf =
+  "@nodes\n"
+  "label\n"
+  "0\n"
+  "1\n"
+  "2\n"
+  "3\n"
+  "4\n"
+  "5\n"
+  "@edges\n"
+  "     cap1 cap2 cap3\n"
+  "0 1  1    1    1   \n"
+  "0 2  2    2    4   \n"
+  "1 2  4    4    4   \n"
+  "3 4  1    1    1   \n"
+  "3 5  2    2    4   \n"
+  "4 5  4    4    4   \n"
+  "2 3  1    6    6   \n";
+
+void checkNagamochiIbarakiCompile()
+{
+  typedef int Value;
+  typedef concepts::Graph Graph;
+
+  typedef Graph::Node Node;
+  typedef Graph::Edge Edge;
+  typedef concepts::ReadMap<Edge, Value> CapMap;
+  typedef concepts::WriteMap<Node, bool> CutMap;
+
+  Graph g;
+  Node n;
+  CapMap cap;
+  CutMap cut;
+  Value v;
+  bool b;
+
+  NagamochiIbaraki<Graph, CapMap> ni_test(g, cap);
+  const NagamochiIbaraki<Graph, CapMap>& const_ni_test = ni_test;
+
+  ni_test.init();
+  ni_test.start();
+  b = ni_test.processNextPhase();
+  ni_test.run();
+
+  v = const_ni_test.minCutValue();
+  v = const_ni_test.minCutMap(cut);
+}
+
+template <typename Graph, typename CapMap, typename CutMap>
+typename CapMap::Value
+  cutValue(const Graph& graph, const CapMap& cap, const CutMap& cut)
+{
+  typename CapMap::Value sum = 0;
+  for (typename Graph::EdgeIt e(graph); e != INVALID; ++e) {
+    if (cut[graph.u(e)] != cut[graph.v(e)]) {
+      sum += cap[e];
+    }
+  }
+  return sum;
+}
+
+int main() {
+  SmartGraph graph;
+  SmartGraph::EdgeMap<int> cap1(graph), cap2(graph), cap3(graph);
+  SmartGraph::NodeMap<bool> cut(graph);
+
+  istringstream input(lgf);
+  graphReader(graph, input)
+    .edgeMap("cap1", cap1)
+    .edgeMap("cap2", cap2)
+    .edgeMap("cap3", cap3)
+    .run();
+
+  {
+    NagamochiIbaraki<SmartGraph, SmartGraph::EdgeMap<int> > ni(graph, cap1);
+    ni.run();
+    ni.minCutMap(cut);
+
+    check(ni.minCutValue() == 1, "Wrong cut value");
+    check(ni.minCutValue() == cutValue(graph, cap1, cut), "Wrong cut value");
+  }
+  {
+    NagamochiIbaraki<SmartGraph, SmartGraph::EdgeMap<int> > ni(graph, cap2);
+    ni.run();
+    ni.minCutMap(cut);
+
+    check(ni.minCutValue() == 3, "Wrong cut value");
+    check(ni.minCutValue() == cutValue(graph, cap2, cut), "Wrong cut value");
+  }
+  {
+    NagamochiIbaraki<SmartGraph, SmartGraph::EdgeMap<int> > ni(graph, cap3);
+    ni.run();
+    ni.minCutMap(cut);
+
+    check(ni.minCutValue() == 5, "Wrong cut value");
+    check(ni.minCutValue() == cutValue(graph, cap3, cut), "Wrong cut value");
+  }
+  {
+    NagamochiIbaraki<SmartGraph>::SetUnitCapacity::Create ni(graph);
+    ni.run();
+    ni.minCutMap(cut);
+
+    ConstMap<SmartGraph::Edge, int> cap4(1);
+    check(ni.minCutValue() == 1, "Wrong cut value");
+    check(ni.minCutValue() == cutValue(graph, cap4, cut), "Wrong cut value");
+  }
+
+  return 0;
+}