19.5 Modifying the Likelihood class

As you know, the Likelihood class uses BeagleLib to compute the likelihood of a tree. BeagleLib works through the tree in reverse level order (from the tips toward the root), combining the conditional likelihood arrays of each pair of child lineages to compute the conditional likelihood array of their parent node.

If trees contain polytomies, however, a wrench is thrown into the works, because a polytomous node has more than two children. We will take advantage of two facts:

  1. there are always enough nodes in the _nodes array of the Tree object to represent a fully resolved tree, and
  2. the likelihood of a polytomous tree is identical to the likelihood of a fully resolved tree representing an arbitrary resolution of all polytomies, so long as every edge added during the resolution has length zero (see the sketch below).
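
To see why fact 2 holds, note that the transition probability matrix for an edge of length zero is the identity matrix, so conditional likelihoods pass across such an edge unchanged. Here is a minimal standalone sketch (4 states, one rate category; an illustration only, not part of strom):

    #include <array>
    #include <cassert>

    int main() {
        // P(0) = exp(Q*0) = I for any rate matrix Q, so a zero-length
        // edge has the identity as its transition probability matrix
        std::array<double, 16> P{};
        for (unsigned s = 0; s < 4; s++)
            P[s*4 + s] = 1.0;

        // Conditional likelihoods of a child lineage at one site
        std::array<double, 4> child = {0.1, 0.2, 0.3, 0.4};

        // Passing them across the zero-length edge changes nothing,
        // so the inserted node leaves the likelihood unaltered
        std::array<double, 4> through{};
        for (unsigned i = 0; i < 4; i++)
            for (unsigned j = 0; j < 4; j++)
                through[i] += P[i*4 + j] * child[j];

        for (unsigned i = 0; i < 4; i++)
            assert(through[i] == child[i]);
    }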

Begin by modifying the Likelihood class declaration in likelihood.hpp. The additions are the tree_manip.hpp include, the extra polytomy parameter on queuePartialsRecalculation, and the three data members _polytomy_helpers, _polytomy_map, and _identity_matrix:

#pragma once

#include <map>
#include <boost/algorithm/string.hpp>
#include <boost/format.hpp>
#include <boost/shared_ptr.hpp>
#include <boost/range/adaptor/reversed.hpp>
#include "libhmsbeagle/beagle.h"
#include "tree.hpp"
#include "tree_manip.hpp"
#include "data.hpp"
#include "model.hpp"
#include "xstrom.hpp"

namespace strom {

    class Likelihood {
        public:
                                                    Likelihood();
                                                    ~Likelihood();

            void                                    setRooted(bool is_rooted);
            void                                    setPreferGPU(bool prefer_gpu);
            void                                    setAmbiguityEqualsMissing(bool ambig_equals_missing);
        
            bool                                    usingStoredData() const;
            void                                    useStoredData(bool using_data);
            void                                    useUnderflowScaling(bool do_scaling);

            std::string                             beagleLibVersion() const;
            std::string                             availableResources() const;
            std::string                             usedResources() const;

            void                                    initBeagleLib();
            void                                    finalizeBeagleLib(bool use_exceptions);

            double                                  calcLogLikelihood(Tree::SharedPtr t);

            Data::SharedPtr                         getData();
            void                                    setData(Data::SharedPtr d);

            Model::SharedPtr                        getModel();
            void                                    setModel(Model::SharedPtr m);

            void                                    clear();
            
            unsigned                                calcNumEdgesInFullyResolvedTree() const;
            unsigned                                calcNumInternalsInFullyResolvedTree() const;
            
        private:
        
            struct InstanceInfo {
                int handle;
                int resourcenumber;
                std::string resourcename;
                unsigned nstates;
                unsigned nratecateg;
                unsigned npatterns;
                unsigned partial_offset;
                unsigned tmatrix_offset;
                bool invarmodel;
                std::vector<unsigned> subsets;
                
                InstanceInfo() : handle(-1), resourcenumber(-1), resourcename(""), nstates(0), nratecateg(0), npatterns(0), partial_offset(0), tmatrix_offset(0), invarmodel(false) {}
            };

            typedef std::pair<unsigned, int>        instance_pair_t;

            unsigned                                getScalerIndex(Node * nd, InstanceInfo & info) const;
            unsigned                                getPartialIndex(Node * nd, InstanceInfo & info) const;
            unsigned                                getTMatrixIndex(Node * nd, InstanceInfo & info, unsigned subset_index) const;
            void                                    updateInstanceMap(instance_pair_t & p, unsigned subset);
            void                                    newInstance(unsigned nstates, int nrates, std::vector<unsigned> & subset_indices);
            void                                    setTipStates();
            void                                    setTipPartials();
            void                                    setPatternPartitionAssignments();
            void                                    setPatternWeights();
            void                                    setAmongSiteRateHeterogenetity();
            void                                    setModelRateMatrix();
            void                                    addOperation(InstanceInfo & info, Node * nd, Node * lchild, Node * rchild, unsigned subset_index);
            void                                    queuePartialsRecalculation(Node * nd, Node * lchild, Node * rchild, Node * polytomy = 0);
            void                                    queueTMatrixRecalculation(Node * nd);
            void                                    defineOperations(Tree::SharedPtr t);
            void                                    updateTransitionMatrices();
            void                                    calculatePartials();
            double                                  calcInstanceLogLikelihood(InstanceInfo & inst, Tree::SharedPtr t);


            std::vector<InstanceInfo>               _instances;
            std::map<int, std::string>              _beagle_error;
            std::map<int, std::vector<int> >        _operations;
            std::map<int, std::vector<int> >        _pmatrix_index;
            std::map<int, std::vector<double> >     _edge_lengths;
            std::map<int, std::vector<int> >        _eigen_indices;
            std::map<int, std::vector<int> >        _category_rate_indices;
            double                                  _relrate_normalizing_constant;

            std::vector<int>                        _subset_indices;
            std::vector<int>                        _parent_indices;
            std::vector<int>                        _child_indices;
            std::vector<int>                        _tmatrix_indices;
            std::vector<int>                        _weights_indices;
            std::vector<int>                        _freqs_indices;
            std::vector<int>                        _scaling_indices;

            Model::SharedPtr                        _model;

            Data::SharedPtr                         _data;
            unsigned                                _ntaxa;
            bool                                    _rooted;
            bool                                    _prefer_gpu;
            bool                                    _ambiguity_equals_missing;
            bool                                    _underflow_scaling;
            bool                                    _using_data;

            std::vector<Node *>                     _polytomy_helpers;
            std::map<int, std::vector<int> >        _polytomy_map;
            std::vector<double>                     _identity_matrix;

        public:
            typedef std::shared_ptr< Likelihood >   SharedPtr;
    };

}

Modifying the clear function

Add the line that initializes _identity_matrix to the clear function. This single-element placeholder is resized to hold a full set of identity matrices in newInstance.

    inline void Likelihood::clear() {   
        finalizeBeagleLib(true);
        
        _ntaxa                      = 0;
        _rooted                     = false;
        _prefer_gpu                 = false;
        _ambiguity_equals_missing   = true;
        _underflow_scaling          = false;
        _using_data                 = true;
        _data                       = nullptr;
        
        _operations.clear();
        _pmatrix_index.clear();
        _edge_lengths.clear();
        _eigen_indices.clear();
        _category_rate_indices.clear();
        _relrate_normalizing_constant = 1.0;
        _subset_indices.assign(1, 0);
        _parent_indices.assign(1, 0);
        _child_indices.assign(1, 0);
        _tmatrix_indices.assign(1, 0);
        _weights_indices.assign(1, 0);
        _freqs_indices.assign(1, 0);
        _scaling_indices.assign(1, 0);
        _identity_matrix.assign(1, 0.0);

        _model = Model::SharedPtr(new Model());        

        // Store BeagleLib error codes so that useful
        // error messages may be provided to the user
        _beagle_error.clear();
        _beagle_error[0]  = std::string("success");
        _beagle_error[-1] = std::string("unspecified error");
        _beagle_error[-2] = std::string("not enough memory could be allocated");
        _beagle_error[-3] = std::string("unspecified exception");
        _beagle_error[-4] = std::string("the instance index is out of range, or the instance has not been created");
        _beagle_error[-5] = std::string("one of the indices specified exceeded the range of the array");
        _beagle_error[-6] = std::string("no resource matches requirements");
        _beagle_error[-7] = std::string("no implementation matches requirements");
        _beagle_error[-8] = std::string("floating-point range exceeded");
    }   

Modifying the newInstance function

Initialize the _identity_matrix data member in the first few lines of newInstance:

    inline void Likelihood::newInstance(unsigned nstates, int nrates, std::vector<unsigned> & subset_indices) {
        unsigned num_subsets = (unsigned)subset_indices.size();

        bool is_invar_model = (nrates < 0 ? true : false);
        unsigned ngammacat = (unsigned)(is_invar_model ? -nrates : nrates);

        // Create an identity matrix used for computing partials
        // for polytomies (it serves as the transition matrix for
        // the zero-length edges inserted to arbitrarily resolve
        // each polytomy). One identity matrix is laid down for
        // each of the ngammacat rate categories.
        _identity_matrix.assign(nstates*nstates*ngammacat, 0.0);
        for (unsigned k = 0; k < ngammacat; k++) {
            unsigned offset = k*nstates*nstates;
            for (unsigned s = 0; s < nstates; s++)
                _identity_matrix[s*nstates + s + offset] = 1.0;
        }
        
        //...   
        
    }   
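
The layout built above is the one beagleSetTransitionMatrix expects: ngammacat matrices stacked end to end, each a row-major nstates-by-nstates block. A quick standalone check of that indexing (a sketch for illustration, not strom code):

    #include <cassert>
    #include <vector>

    int main() {
        unsigned nstates = 4, ngammacat = 3;
        std::vector<double> m(nstates*nstates*ngammacat, 0.0);
        for (unsigned k = 0; k < ngammacat; k++) {
            unsigned offset = k*nstates*nstates;
            for (unsigned s = 0; s < nstates; s++)
                m[s*nstates + s + offset] = 1.0;    // same fill as newInstance
        }
        // Element (i,j) of rate category k lives at k*nstates*nstates + i*nstates + j
        assert(m[1*16 + 2*4 + 2] == 1.0);   // diagonal element (2,2) of category 1
        assert(m[1*16 + 2*4 + 3] == 0.0);   // off-diagonal element (2,3) of category 1
    }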

Modifying the queuePartialsRecalculation function

Add the polytomy parameter (which defaults to 0, meaning no polytomy) and the block that handles it to the queuePartialsRecalculation member function:

    inline void Likelihood::queuePartialsRecalculation(Node * nd, Node * lchild, Node * rchild, Node * polytomy) {
        for (auto & info : _instances) {
            unsigned instance_specific_subset_index = 0;
            for (unsigned s : info.subsets) {

                if (polytomy) {
                    // nd has been pulled out of the tree's _unused_nodes vector
                    // to break up the polytomy; the "polytomy" parameter is the
                    // polytomous node itself

                    // First get the transition matrix index
                    unsigned tindex = getTMatrixIndex(nd, info, instance_specific_subset_index);

                    // Set the transition matrix for nd to the identity matrix
                    // (the last argument, 1, is the value used for padding
                    // ambiguous states and should be 1 for transition matrices)
                    int code = beagleSetTransitionMatrix(info.handle, tindex, &_identity_matrix[0], 1);
                    if (code != 0)
                        throw XStrom(boost::str(boost::format("Failed to set transition matrix for instance %d. BeagleLib error code was %d (%s)") % info.handle % code % _beagle_error[code]));

                    // Set the edge length to 0.0 to maintain consistency with the
                    // identity transition matrix
                    nd->setEdgeLength(0.0);

                    // If employing underflow scaling, the scaling factors for these
                    // fake nodes need to be transferred to the polytomous node, as
                    // that will be the only node remaining after the likelihood has
                    // been calculated. Save nd's scaling factor index, associating
                    // it with the scaling factor index of the polytomy node.
                    if (_underflow_scaling) {
                        // Get the polytomy's scaling factor index
                        int spolytomy = getScalerIndex(polytomy, info);

                        // Get nd's scaling factor index
                        int snd = getScalerIndex(nd, info);

                        // Save nd's index in the vector associated with polytomy's index
                        _polytomy_map[spolytomy].push_back(snd);
                    }
                }

                addOperation(info, nd, lchild, rchild, instance_specific_subset_index);
                ++instance_specific_subset_index;
            }
        }
    }

Modifying the defineOperations function

This is the main modification made to the Likelihood class to allow for polytomies in trees. The vector _polytomy_helpers starts out empty. Currently unused nodes are assigned to each polytomy, pairing off its arms until only two children remain. Each fake internal node is assigned a transition matrix equal to the identity matrix, because its edge length is necessarily zero (so no transition matrix calculation is needed). Operations are added to compute partials for each fake internal node as well as for the actual (polytomous) node. Each unused node that is put into service is stored in _polytomy_helpers so that, after the likelihood is computed, all these fake internal nodes can be returned to their former unused state.
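
For example, a polytomy with four children a, b, c, and d requires nchildren - 2 = 2 helper nodes: the first joins a and b, the second joins the first helper and c, and the real node finally joins the second helper and d. The standalone sketch below (hypothetical names, illustration only) traces just that pairing order:

    #include <iostream>
    #include <string>
    #include <vector>

    int main() {
        // Arms of a polytomy with four children
        std::vector<std::string> arms = {"a", "b", "c", "d"};

        std::string left = arms[0], right = arms[1];
        unsigned helper = 1;
        for (unsigned k = 0; k + 2 < arms.size(); k++) {
            // corresponds to c = tm.getUnusedNode() followed by
            // queuePartialsRecalculation(c, a, b, nd)
            std::string h = "h" + std::to_string(helper++);
            std::cout << h << " = (" << left << "," << right << ")\n";
            left = h;               // a = c
            right = arms[k + 2];    // b = b->_right_sib
        }
        // corresponds to queuePartialsRecalculation(nd, a, b)
        std::cout << "nd = (" << left << "," << right << ")\n";
        // Output: h1 = (a,b)
        //         h2 = (h1,c)
        //         nd = (h2,d)
    }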

Add the TreeManip-based polytomy handling inside the isSelPartial block of the defineOperations member function:

    inline void Likelihood::defineOperations(Tree::SharedPtr t) {
        assert(_instances.size() > 0);
        assert(t);
        assert(t->isRooted() == _rooted);
        assert(_polytomy_helpers.empty());
        assert(_polytomy_map.empty());

        _relrate_normalizing_constant = _model->calcNormalizingConstantForSubsetRelRates();

        // Start with a clean slate
        for (auto & info : _instances) {
            _operations[info.handle].clear();
            _pmatrix_index[info.handle].clear();
            _edge_lengths[info.handle].clear();
            _eigen_indices[info.handle].clear();
            _category_rate_indices[info.handle].clear();
        }

        // Loop through all nodes in reverse level order
        for (auto nd : boost::adaptors::reverse(t->_levelorder)) {
            assert(nd->_number >= 0);
            if (!nd->_left_child) {
                // This is a leaf
                if (nd->isSelTMatrix())
                    queueTMatrixRecalculation(nd);
            }
            else {
                // This is an internal node
                if (nd->isSelTMatrix())
                    queueTMatrixRecalculation(nd);

                // Internal nodes have partials to be calculated, so define
                // an operation to compute the partials for this node
                if (nd->isSelPartial()) {
                    TreeManip tm(t);
                    if (tm.isPolytomy(nd)) {
                        // Internal node is a polytomy
                        unsigned nchildren = tm.countChildren(nd);
                        Node * a = nd->_left_child;
                        Node * b = a->_right_sib;
                        Node * c = 0;
                        for (unsigned k = 0; k < nchildren - 2; k++) {
                            c = tm.getUnusedNode();
                            c->_left_child = a;
                            _polytomy_helpers.push_back(c);

                            queuePartialsRecalculation(c, a, b, nd);

                            // Tackle next arm of the polytomy
                            b = b->_right_sib;
                            a = c;
                        }

                        // Now add the operation to compute the partials
                        // for the real internal node
                        queuePartialsRecalculation(nd, a, b);
                    }
                    else {
                        // Internal node is not a polytomy
                        Node * lchild = nd->_left_child;
                        assert(lchild);
                        Node * rchild = lchild->_right_sib;
                        assert(rchild);
                        queuePartialsRecalculation(nd, lchild, rchild);
                    }
                }
            }
        }
    }

Modifying the calculatePartials function

The polytomy helper nodes that were brought out of storage to help compute the partials for polytomous nodes will be returned to storage after the likelihood calculation. We must therefore accumulate the scale factors for these helpers now, transferring the sum of their log scalers to the polytomous node itself; otherwise those factors would be lost.
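
What the transfer accomplishes can be seen in a small conceptual sketch (bookkeeping only, not BeagleLib's implementation): every rescaled node stores one log scale factor per site, the site log likelihood includes the sum of all of these, and accumulation simply folds each helper's per-site values into the polytomous node's buffer before the helpers disappear.

    #include <cstddef>
    #include <vector>

    // Fold each helper's per-site log scale factors into the buffer
    // belonging to the polytomous node (illustration only)
    void accumulate(std::vector<double> & polytomy_buffer,
                    const std::vector<std::vector<double> > & helper_buffers) {
        for (const auto & h : helper_buffers)
            for (std::size_t i = 0; i < polytomy_buffer.size(); i++)
                polytomy_buffer[i] += h[i];
    }

In the function below, beagleAccumulateScaleFactors (or its by-partition variant) performs exactly this accumulation inside BeagleLib.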

    inline void Likelihood::calculatePartials() {
        assert(_instances.size() > 0);
        if (_operations.size() == 0)
            return;
        int code = 0;

        // Loop through all instances
        for (auto & info : _instances) {
            unsigned nsubsets = (unsigned)info.subsets.size();

            if (nsubsets > 1) {
                code = beagleUpdatePartialsByPartition(
                    info.handle,                                                    // Instance number
                    (BeagleOperationByPartition *) &_operations[info.handle][0],    // BeagleOperation list specifying operations
                    (int)(_operations[info.handle].size()/9));                      // Number of operations
                if (code != 0)
                    throw XStrom(boost::format("failed to update partials. BeagleLib error code was %d (%s)") % code % _beagle_error[code]);

                if (_underflow_scaling) {
                    // Accumulate scaling factors across polytomy helpers and
                    // assign them to their parent node
                    for (auto & m : _polytomy_map) {
                        for (unsigned subset = 0; subset < nsubsets; subset++) {
                            code = beagleAccumulateScaleFactorsByPartition(info.handle, &m.second[0], (int)m.second.size(), m.first, subset);
                            if (code != 0) {
                                throw XStrom(boost::format("failed to transfer scaling factors to polytomous node. BeagleLib error code was %d (%s)") % code % _beagle_error[code]);
                            }
                        }
                    }
                }
            }
            else {
                // No partitioning, just one data subset
                code = beagleUpdatePartials(
                    info.handle,                                        // Instance number
                    (BeagleOperation *) &_operations[info.handle][0],   // BeagleOperation list specifying operations
                    (int)(_operations[info.handle].size()/7),           // Number of operations
                    BEAGLE_OP_NONE);                                    // Index number of scaleBuffer to store accumulated factors
                if (code != 0)
                    throw XStrom(boost::format("failed to update partials. BeagleLib error code was %d (%s)") % code % _beagle_error[code]);

                if (_underflow_scaling) {
                    // Accumulate scaling factors across polytomy helpers and
                    // assign them to their parent node
                    for (auto & m : _polytomy_map) {
                        code = beagleAccumulateScaleFactors(info.handle, &m.second[0], (int)m.second.size(), m.first);
                        if (code != 0) {
                            throw XStrom(boost::format("failed to transfer scaling factors to polytomous node. BeagleLib error code was %d (%s)") % code % _beagle_error[code]);
                        }
                    }
                }
            }
        }
    }

Modifying the calcLogLikelihood function

All we need to do in the calcLogLikelihood function is return the helper nodes we pulled out of storage to their previous, unused state once the log likelihood has been computed.

    inline double Likelihood::calcLogLikelihood(Tree::SharedPtr t) {
        assert(_instances.size() > 0);

        if (!_using_data)
            return 0.0;

        // Must call setData and setModel before calcLogLikelihood
        assert(_data);
        assert(_model);

        if (t->_is_rooted)
            throw XStrom("This version of the program can only compute likelihoods for unrooted trees");

        // Assuming "root" is leaf 0
        assert(t->_root->_number == 0 && t->_root->_left_child == t->_preorder[0] && !t->_preorder[0]->_right_sib);

        setModelRateMatrix();
        setAmongSiteRateHeterogenetity();
        defineOperations(t);
        updateTransitionMatrices();
        calculatePartials();

        double log_likelihood = 0.0;
        for (auto & info : _instances) {
            log_likelihood += calcInstanceLogLikelihood(info, t);
        }

        // We no longer need the internal nodes brought out of storage
        // and used to compute partials for polytomies
        TreeManip tm(t);
        for (Node * h : _polytomy_helpers) {
            tm.putUnusedNode(h);
        }
        _polytomy_helpers.clear();
        _polytomy_map.clear();

        return log_likelihood;
    }
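
With these changes in place, the calling workflow is unchanged from earlier chapters. A hypothetical usage sketch (assuming data, model, and tree objects have already been created and populated as before):

    Likelihood::SharedPtr likelihood(new Likelihood());
    likelihood->setData(data);
    likelihood->setModel(model);
    likelihood->initBeagleLib();

    // The tree passed in may now contain polytomies
    double lnL = likelihood->calcLogLikelihood(tree);

Because calcLogLikelihood returns every helper node to the unused pool before it exits, repeated calls (for example, during MCMC) always find the full complement of helpers available again.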
