# Source code for cr.sparse._src.opt.smooth.entropy

# Copyright 2021 CR-Suite Development Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def smooth_entropy():
    r"""Entropy function :math:`f(x) = -\sum_i x_i \log(x_i)` and its gradient.

    Returns a smooth-function object (via ``build2``) exposing the entropy
    value and its gradient.  Both evaluators guard the domain: any negative
    entry makes the value :math:`-\infty` and the gradient NaN.
    """

    @jit
    def func(x):
        """Entropy value of ``x``; -inf when ``x`` has a negative entry."""
        arr = cnb.promote_arg_dtypes(jnp.asarray(x))

        def outside(_):
            # Entropy is undefined for negative entries.
            return -jnp.inf

        def inside(_):
            return -jnp.vdot(arr, cnb.log_pos(arr))

        return lax.cond(jnp.any(arr < 0), outside, inside, None)

    @jit
    def gradient(x):
        """Gradient of the entropy; NaN entries when ``x`` leaves the domain."""
        arr = cnb.promote_arg_dtypes(jnp.asarray(x))

        def outside(_):
            return jnp.full_like(arr, jnp.nan)

        def inside(_):
            # d/dx_i of -x_i log x_i is -(log x_i + 1).
            return -cnb.log_pos(arr) - 1

        return lax.cond(jnp.any(arr < 0), outside, inside, None)

    return build2(func, gradient)
def smooth_entropy_vg():
    r"""Entropy function :math:`f(x) = -\sum_i x_i \log(x_i)` with a fused
    gradient+value evaluator (optimized implementation).

    Builds on :func:`smooth_entropy` for the separate ``func``/``grad``
    evaluators and adds a single-pass ``grad_val`` that shares the
    ``log`` computation between the value and the gradient.
    """

    def _outside_domain(x):
        # Out of domain: NaN gradient, -inf value.
        return jnp.full_like(x, jnp.nan), -jnp.inf

    def _inside_domain(x):
        # Compute log(x) once and reuse it for both outputs.
        logx = cnb.log_pos(x)
        return -logx - 1, -jnp.vdot(x, logx)

    @jit
    def grad_val(x):
        """Return ``(gradient, value)`` of the entropy at ``x``."""
        arr = cnb.promote_arg_dtypes(jnp.asarray(x))
        return lax.cond(jnp.any(arr < 0), _outside_domain, _inside_domain, arr)

    basic = smooth_entropy()
    return build3(basic.func, basic.grad, grad_val)