@article{Ge_Xianlei_ADDP:_2023,
  author    = {Ge, Xianlei and Li, Xiaoyan and Zhang, Zhipeng},
  title     = {{ADDP}: Anomaly Detection Based on Denoising Pretraining},
  journal   = {International Journal of Electronics and Telecommunications},
  volume    = {69},
  number    = {4},
  pages     = {719--726},
  year      = {2023},
  publisher = {Polish Academy of Sciences Committee of Electronics and Telecommunications},
  doi       = {10.24425/ijet.2023.147693},
  url       = {http://www.czasopisma.pan.pl/Content/129114/PDF-MASTER/12-4321-Ge-sk.pdf},
  keywords  = {Anomaly Detection, Diffusion Models, image denoising, Pretraining, transfer learning},
  abstract  = {Acquiring labels in anomaly detection tasks is expensive and challenging. Therefore, as an effective way to improve efficiency, pretraining is widely used in anomaly detection models, which enriches the model's representation capabilities, thereby enhancing both performance and efficiency in anomaly detection. In most pretraining methods, the decoder is typically randomly initialized. Drawing inspiration from the diffusion model, this paper proposed to use denoising as a task to pretrain the decoder in anomaly detection, which is trained to reconstruct the original noise-free input. Denoising requires the model to learn the structure, patterns, and related features of the data, particularly when training samples are limited. This paper explored two approaches on anomaly detection: simultaneous denoising pretraining for encoder and decoder, denoising pretraining for only decoder. Experimental results demonstrate the effectiveness of this method on improving model's performance. Particularly, when the number of samples is limited, the improvement is more pronounced.},
}