@techreport{oai:ipsj.ixsq.nii.ac.jp:00210981,
  author      = {Nanashima, Mikito},
  title       = {On Learning from Average-Case Errorless Computing (Extended Abstract)},
  institution = {Information Processing Society of Japan},
  year        = {2021},
  month       = apr,
  number      = {5},
  note        = {A PAC learning model contains a worst-case sense due to the following requirement : a learner must learn (1) all functions in a concept class on (2) all example distributions. Thus, whether average-case computation on a fixed distribution is sufficient for such learning is quite non-trivial and a significant question to understand the nature of learning. Recent studies on complexity theory implicitly revealed that average-case errorless computation for a distributional NP-problem is sufficient to resolve the worst-case requirement (1) alone. In this paper, we addressed the worst-case requirement (2) (alone) and the requirements (1) and (2) simultaneously to identify where the current non-average aspect of learning essentially arises. Specifically, we will show the following theorems: (i) polynomial-size circuits are efficiently distribution-free PAC learnable on average iff there exist auxiliary-input pseudorandom generators, and (ii) if DistNP ⊆ AvgP, then polynomial-size circuits are efficiently agnostic learnable under all P/poly-computable example distributions. Our learning algorithm works without specified an example distribution as a usual distribution-free learner, but the time and sample complexity depends on the complexity of example distributions.},
}