@techreport{oai:ipsj.ixsq.nii.ac.jp:00210978,
  author        = {Araki, Tetsuya and Miyata, Hiroyuki and Nakano, Shin-ichi},
  title         = {k-Dispersion on Intervals},
  number        = {2},
  month         = apr,
  year          = {2021},
  note          = {Given a set of n disjoint intervals on a line, and an integer k, we want to find k points in the intervals so that the minimum pairwise distance of the k points is maximized. Intuitively, given a set of n disjoint time intervals on a timeline, each of which is a time span we are allowed to check something, and an integer k, which is the number of times we will check something, we plan the k checking times so that the checks occur at equal time intervals as much as possible, that is, we want to maximize the minimum time interval between the k checking times. The problem is called the k-dispersion problem on intervals. If we need to choose exactly one point in each interval, so k = n, and the disjoint intervals are given in the sorted order on the line, then two O(n) time algorithms to solve the problem are known. In this paper we give the first O(n) time algorithm to solve the problem for any constant k. Here one can check twice or more in one time interval. Our algorithm works even if the disjoint intervals are given in any (not sorted) order. If the disjoint intervals are given in the sorted order on the line, then, by slightly modifying the algorithm, one can solve the problem in O(log n) time. This is the first sublinear time algorithm to solve the problem. Also we show some results on the k-dispersion problem on disks, including a PTAS.},
  internal-note = {Review notes: removed duplicated author list and duplicated abstract text from the auto-export; normalised month to the apr macro; renamed nonstandard `issue' to `number'. The required `institution' field is missing -- the OAI key suggests IPSJ (Information Processing Society of Japan) SIG technical report, but confirm against the source before adding.},
}