@inproceedings{chhogyal2015kl,
title = "On the KL divergence of probability mixtures for belief contraction",
abstract = "Probabilistic belief change is an operation that takes a probability distribution representing a belief state along with an input sentence representing some information to be accommodated or removed, and maps it to a new probability distribution. In order to choose among the many such possible mappings, techniques from information theory such as the principle of minimum cross-entropy have previously been used. Central to this principle is the Kullback-Leibler (KL) divergence. In this short study, we focus on the contraction of a belief state P by a belief a, which is the process of turning the belief a into a non-belief. The contracted belief state P−a can be represented as a mixture of two states: the original belief state P, and the resultant state P*¬a of revising P by ¬a. Crucial to this mixture is the mixing factor ε, which determines the proportion of P and P*¬a that are to be used in this process. We show that once ε is determined, the KL divergence of P−a from P is given by a function whose only argument is ε. We suggest that ε is not only a mixing factor but also captures relevant aspects of P and P*¬a required for computing the KL divergence.",
author = "Kinzang Chhogyal and Abhaya Nayak and Abdul Sattar",
year = "2015",
doi = "10.1007/978-3-319-24489-1_20",
language = "English",
isbn = "9783319244884",
series = "Lecture Notes in Computer Science",
publisher = "Springer",
pages = "249--255",
editor = "Steffen H{\"o}lldobler and Markus Kr{\"o}tzsch and Rafael Pe{\~n}aloza and Sebastian Rudolph",
booktitle = "KI 2015: Advances in Artificial Intelligence",
address = "Cham",
note = "38th Annual German Conference on Artificial Intelligence, KI 2015; Conference date: 21-09-2015 through 25-09-2015",
}
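
As a quick numerical check of the result stated in the abstract, the Python sketch below mixes a belief state P with a revised state P*¬a and confirms that the divergence depends only on the mixing factor ε. It assumes the reading D(P || P−a) for "the KL divergence of P−a from P", and it assumes the two states have disjoint supports (P fully believes a, while P*¬a is concentrated on the ¬a-worlds); the specific distributions and ε values are illustrative, not taken from the paper.

    import numpy as np

    def kl(p, q):
        # KL divergence D(p || q), summed over the support of p.
        mask = p > 0
        return float(np.sum(p[mask] * np.log(p[mask] / q[mask])))

    # Hypothetical possible worlds: the first three satisfy a,
    # the last two satisfy ¬a.
    P = np.array([0.5, 0.3, 0.2, 0.0, 0.0])  # belief state with P(a) = 1
    Q = np.array([0.0, 0.0, 0.0, 0.6, 0.4])  # P*¬a, concentrated on ¬a-worlds

    for eps in (0.1, 0.3, 0.7):
        P_contract = eps * P + (1 - eps) * Q  # the mixture representing P−a
        print(eps, kl(P, P_contract), -np.log(eps))  # the two values coincide

With disjoint supports, the mixture restricted to P's support is just εP, so the ratio inside the logarithm is 1/ε everywhere and the divergence collapses to −log ε: consistent with the paper's claim that, once fixed, ε alone determines the KL divergence.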