@techreport{oai:ipsj.ixsq.nii.ac.jp:00224978,
 author = {村上, 恭哉 and 高橋, 慧智 and 市川, 昊平 and 飯田, 元 and Murakami, Kyoya and Takahashi, Keichi and Ichikawa, Kohei and Iida, Hajimu},
 issue = {38},
 month = {Mar},
 note = {In recent years, containers have been increasingly adopted in edge computing environments, and Kubernetes is widely used to improve fault tolerance and to balance request load. However, Kubernetes is not effective in geographically distributed environments because its load balancing does not consider the latency between sites. This research therefore proposes a load balancing method that dynamically adjusts the ratio of requests forwarded to each site based on the inter-site communication latency and the service load, using service mesh functions. The proposed dynamic method is compared with a conventional static load balancing method in an environment emulating widely distributed sites. The results demonstrate that, in a high-latency environment, the proposed method improves the cumulative number of served requests by more than 5% over the static method.},
 title = {エッジコンピューティングにおける拠点間の通信遅延を考慮したリクエスト分散制御の評価},
 year = {2023}
}