Source code for cvxpy.atoms.elementwise.kl_div

"""
Copyright 2013 Steven Diamond

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

from __future__ import division

from typing import List, Optional, Tuple

import numpy as np
from scipy.sparse import csc_matrix
from scipy.special import kl_div as kl_div_scipy

from cvxpy.atoms.elementwise.elementwise import Elementwise
from cvxpy.constraints.constraint import Constraint


class kl_div(Elementwise):
    """:math:`x\\log(x/y) - x + y`

    For disambiguation between kl_div and rel_entr, see
    https://github.com/cvxpy/cvxpy/issues/733
    """

    def __init__(self, x, y) -> None:
        super(kl_div, self).__init__(x, y)

    @Elementwise.numpy_numeric
    def numeric(self, values):
        x = values[0]
        y = values[1]
        return kl_div_scipy(x, y)

    def sign_from_args(self) -> Tuple[bool, bool]:
        """Returns sign (is positive, is negative) of the expression.
        """
        # Always positive.
        return (True, False)

    def is_atom_convex(self) -> bool:
        """Is the atom convex?
        """
        return True

    def is_atom_concave(self) -> bool:
        """Is the atom concave?
        """
        return False

    def is_incr(self, idx) -> bool:
        """Is the composition non-decreasing in argument idx?
        """
        return False

    def is_decr(self, idx) -> bool:
        """Is the composition non-increasing in argument idx?
        """
        return False

    def _grad(self, values) -> List[Optional[csc_matrix]]:
        """Gives the (sub/super)gradient of the atom w.r.t. each argument.

        Matrix expressions are vectorized, so the gradient is a matrix.

        Args:
            values: A list of numeric values for the arguments.

        Returns:
            A list of SciPy CSC sparse matrices or None.
        """
        if np.min(values[0]) <= 0 or np.min(values[1]) <= 0:
            # Non-differentiable.
            return [None, None]
        else:
            div = values[0] / values[1]
            grad_vals = [np.log(div), 1 - div]
            grad_list = []
            for idx in range(len(values)):
                rows = self.args[idx].size
                cols = self.size
                grad_list += [kl_div.elemwise_grad_to_diag(grad_vals[idx],
                                                           rows, cols)]
            return grad_list

    def _domain(self) -> List[Constraint]:
        """Returns constraints describing the domain of the node.
        """
        return [self.args[0] >= 0, self.args[1] >= 0]
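
A minimal usage sketch, not part of the module above: it shows the atom used in a DCP problem and checks that its numeric evaluation matches scipy.special.kl_div, which is what numeric() delegates to. The data, variable names, and simplex constraint are illustrative assumptions, and solving requires a solver with exponential-cone support (e.g. SCS) to be installed.

import numpy as np
import cvxpy as cp
from scipy.special import kl_div as kl_div_scipy

# Illustrative positive data (made up for this sketch).
a = np.array([0.2, 0.5, 0.3])

# A nonnegative variable constrained to the probability simplex.
x = cp.Variable(3, nonneg=True)

# Elementwise x*log(x/a) - x + a, summed; the atom is jointly convex, so this is DCP.
objective = cp.Minimize(cp.sum(cp.kl_div(x, a)))
problem = cp.Problem(objective, [cp.sum(x) == 1])
problem.solve()  # assumes an exponential-cone solver such as SCS is available

print(x.value)        # expected to be close to a, with optimal value near 0
print(problem.value)

# The atom's numeric evaluation delegates to scipy.special.kl_div.
b = np.array([0.3, 0.4, 0.3])
print(np.allclose(cp.kl_div(b, a).value, kl_div_scipy(b, a)))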