Maintained by Difan Deng and Marius Lindauer.
The following list considers papers related to neural architecture search. It is by no means complete. If you miss a paper on the list, please let us know.
Please note that although NAS methods steadily improve, the quality of empirical evaluations in this field is still lagging behind compared to other areas in machine learning, AI and optimization. We would therefore like to share some best practices for empirical evaluations of NAS methods, which we believe will facilitate sustained and measurable progress in the field. If you are interested in a teaser, please read our blog post or directly jump to our checklist.
Transformers have gained increasing popularity in different domains. For a comprehensive list of papers focusing on Neural Architecture Search for Transformer-Based spaces, the awesome-transformer-search repo is all you need.
2021
Ge, Wanying; Panariello, Michele; Patino, Jose; Todisco, Massimiliano; Evans, Nicholas W D
Partially-Connected Differentiable Architecture Search for Deepfake and Spoofing Detection Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2104-03123,
  title     = {Partially-Connected Differentiable Architecture Search for Deepfake and Spoofing Detection},
  author    = {Wanying Ge and Michele Panariello and Jose Patino and Massimiliano Todisco and Nicholas W. D. Evans},
  url       = {https://arxiv.org/abs/2104.03123},
  year      = {2021},
  date      = {2021-01-01},
  journal   = {CoRR},
  volume    = {abs/2104.03123},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {techreport},
}
Yu, Kaicheng; Ranftl, René; Salzmann, Mathieu
Landmark Regularization: Ranking Guided Super-Net Training in Neural Architecture Search Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2104-05309,
  title     = {Landmark Regularization: Ranking Guided Super-Net Training in Neural Architecture Search},
  author    = {Kaicheng Yu and Ren{\'e} Ranftl and Mathieu Salzmann},
  url       = {https://arxiv.org/abs/2104.05309},
  year      = {2021},
  date      = {2021-01-01},
  journal   = {CoRR},
  volume    = {abs/2104.05309},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {techreport},
}
Liu, Yukun; Li, Ta; Zhang, Pengyuan; Yan, Yonghong
Improved Conformer-based End-to-End Speech Recognition Using Neural Architecture Search Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2104-05390,
  title     = {Improved Conformer-based End-to-End Speech Recognition Using Neural Architecture Search},
  author    = {Yukun Liu and Ta Li and Pengyuan Zhang and Yonghong Yan},
  url       = {https://arxiv.org/abs/2104.05390},
  year      = {2021},
  date      = {2021-01-01},
  journal   = {CoRR},
  volume    = {abs/2104.05390},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {techreport},
}
Wang, Chunnan; Chen, Bozhou; Li, Geng; Wang, Hongzhi
FL-AGCNS: Federated Learning Framework for Automatic Graph Convolutional Network Search Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2104-04141,
  title     = {FL-AGCNS: Federated Learning Framework for Automatic Graph Convolutional Network Search},
  author    = {Chunnan Wang and Bozhou Chen and Geng Li and Hongzhi Wang},
  url       = {https://arxiv.org/abs/2104.04141},
  year      = {2021},
  date      = {2021-01-01},
  journal   = {CoRR},
  volume    = {abs/2104.04141},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {techreport},
}
Meng, Tianjian; Chen, Xiaohan; Jiang, Yifan; Wang, Zhangyang
A Design Space Study for LISTA and Beyond Proceedings Article
In: International Conference on Learning Representations, 2021.
@inproceedings{meng2021a,
  title     = {A Design Space Study for LISTA and Beyond},
  author    = {Tianjian Meng and Xiaohan Chen and Yifan Jiang and Zhangyang Wang},
  url       = {https://openreview.net/forum?id=GMgHyUPrXa},
  year      = {2021},
  date      = {2021-01-01},
  booktitle = {International Conference on Learning Representations},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings},
}
Liu, Aoming; Huang, Zehao; Huang, Zhiwu; Wang, Naiyan
Direct Differentiable Augmentation Search Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2104-04282,
  title     = {Direct Differentiable Augmentation Search},
  author    = {Aoming Liu and Zehao Huang and Zhiwu Huang and Naiyan Wang},
  url       = {https://arxiv.org/abs/2104.04282},
  year      = {2021},
  date      = {2021-01-01},
  journal   = {CoRR},
  volume    = {abs/2104.04282},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {techreport},
}
Tashiro, Kyosuke; Takeda, Koji; Tanaka, Kanji; Tomoe, Hiroki
TaylorMade VDD: Domain-adaptive Visual Defect Detector for High-mix Low-volume Production of Non-convex Cylindrical Metal Objects Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2104-04203,
  title     = {TaylorMade VDD: Domain-adaptive Visual Defect Detector for High-mix Low-volume Production of Non-convex Cylindrical Metal Objects},
  author    = {Kyosuke Tashiro and Koji Takeda and Kanji Tanaka and Hiroki Tomoe},
  url       = {https://arxiv.org/abs/2104.04203},
  year      = {2021},
  date      = {2021-01-01},
  journal   = {CoRR},
  volume    = {abs/2104.04203},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {techreport},
}
Peter, David; Roth, Wolfgang; Pernkopf, Franz
End-to-end Keyword Spotting using Neural Architecture Search and Quantization Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2104-06666,
  title     = {End-to-end Keyword Spotting using Neural Architecture Search and Quantization},
  author    = {David Peter and Wolfgang Roth and Franz Pernkopf},
  url       = {https://arxiv.org/abs/2104.06666},
  year      = {2021},
  date      = {2021-01-01},
  journal   = {CoRR},
  volume    = {abs/2104.06666},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {techreport},
}
Mundt, Martin; Pliushch, Iuliia; Ramesh, Visvanathan
Neural Architecture Search of Deep Priors: Towards Continual Learning without Catastrophic Interference Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2104-06788,
  title     = {Neural Architecture Search of Deep Priors: Towards Continual Learning without Catastrophic Interference},
  author    = {Martin Mundt and Iuliia Pliushch and Visvanathan Ramesh},
  url       = {https://arxiv.org/abs/2104.06788},
  year      = {2021},
  date      = {2021-01-01},
  journal   = {CoRR},
  volume    = {abs/2104.06788},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {techreport},
}
Zhao, Huan; Yao, Quanming; Tu, Wei-Wei
Search to aggregate neighborhood for graph neural network Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2104-06608,
  title     = {Search to aggregate neighborhood for graph neural network},
  author    = {Huan Zhao and Quanming Yao and Wei-Wei Tu},
  url       = {https://arxiv.org/abs/2104.06608},
  year      = {2021},
  date      = {2021-01-01},
  journal   = {CoRR},
  volume    = {abs/2104.06608},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {techreport},
}
Xue, Chao; Wang, Xiaoxing; Yan, Junchi; Hu, Yonggang; Yang, Xiaokang; Sun, Kewei
Rethinking Bi-Level Optimization in Neural Architecture Search: A Gibbs Sampling Perspective Proceedings Article
In: AAAI 2021, 2021.
@inproceedings{Xue2021AAAI,
  title     = {Rethinking Bi-Level Optimization in Neural Architecture Search: A Gibbs Sampling Perspective},
  author    = {Chao Xue and Xiaoxing Wang and Junchi Yan and Yonggang Hu and Xiaokang Yang and Kewei Sun},
  url       = {https://www.aaai.org/AAAI21Papers/AAAI-3116.XueC.pdf},
  year      = {2021},
  date      = {2021-01-01},
  booktitle = {AAAI 2021},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings},
}
Yin, Yihang; Huang, Siyu; Zhang, Xiang; Dou, Dejing
BM-NAS: Bilevel Multimodal Neural Architecture Search Technical Report
2021.
@techreport{yin2021bmnas,
  title     = {BM-NAS: Bilevel Multimodal Neural Architecture Search},
  author    = {Yihang Yin and Siyu Huang and Xiang Zhang and Dejing Dou},
  url       = {https://arxiv.org/abs/2104.09379},
  year      = {2021},
  date      = {2021-01-01},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {techreport},
}
Ahn, Joon Young; Cho, Nam Ik
Neural Architecture Search for Image Super-Resolution Using Densely Constructed Search Space: DeCoNAS Technical Report
2021.
@techreport{ahn2021neural,
  title     = {Neural Architecture Search for Image Super-Resolution Using Densely Constructed Search Space: DeCoNAS},
  author    = {Joon Young Ahn and Nam Ik Cho},
  url       = {https://arxiv.org/abs/2104.09048},
  year      = {2021},
  date      = {2021-01-01},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {techreport},
}
Wang, Jiahao; Shu, Han; Xia, Weihao; Yang, Yujiu; Wang, Yunhe
Coarse-to-Fine Searching for Efficient Generative Adversarial Networks Technical Report
2021.
@techreport{wang2021coarsetofine,
  title     = {Coarse-to-Fine Searching for Efficient Generative Adversarial Networks},
  author    = {Jiahao Wang and Han Shu and Weihao Xia and Yujiu Yang and Yunhe Wang},
  url       = {https://arxiv.org/abs/2104.09223},
  year      = {2021},
  date      = {2021-01-01},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {techreport},
}
Yan, Bin; Peng, Houwen; Wu, Kan; Wang, Dong; Fu, Jianlong; Lu, Huchuan
LightTrack: Finding Lightweight Neural Networks for Object Tracking via One-Shot Architecture Search Proceedings Article
In: CVPR2021, 2021.
@inproceedings{DBLP:journals/corr/abs-2104-14545,
  title     = {LightTrack: Finding Lightweight Neural Networks for Object Tracking via One-Shot Architecture Search},
  author    = {Bin Yan and Houwen Peng and Kan Wu and Dong Wang and Jianlong Fu and Huchuan Lu},
  url       = {https://arxiv.org/abs/2104.14545},
  year      = {2021},
  date      = {2021-01-01},
  booktitle = {CVPR2021},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings},
}
Chen, Minghao; Peng, Houwen; Fu, Jianlong; Ling, Haibin
One-Shot Neural Ensemble Architecture Search by Diversity-Guided Search Space Shrinking Proceedings Article
In: CVPR2021, 2021.
@inproceedings{DBLP:journals/corr/abs-2104-00597b,
  title     = {One-Shot Neural Ensemble Architecture Search by Diversity-Guided Search Space Shrinking},
  author    = {Minghao Chen and Houwen Peng and Jianlong Fu and Haibin Ling},
  url       = {https://arxiv.org/abs/2104.00597},
  year      = {2021},
  date      = {2021-01-01},
  booktitle = {CVPR2021},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings},
}
Verma, Monu; Reddy, Satish Kumar M; Meedimale, Yashwanth Reddy; Mandal, Murari; Vipparthi, Santosh Kumar
AutoMER: Spatiotemporal Neural Architecture Search for Microexpression Recognition Journal Article
In: IEEE Transactions on Neural Networks and Learning Systems, pp. 1-13, 2021.
@article{9411707,
  title     = {AutoMER: Spatiotemporal Neural Architecture Search for Microexpression Recognition},
  author    = {Monu Verma and Satish Kumar M Reddy and Yashwanth Reddy Meedimale and Murari Mandal and Santosh Kumar Vipparthi},
  url       = {https://ieeexplore.ieee.org/abstract/document/9411707/},
  doi       = {10.1109/TNNLS.2021.3072290},
  year      = {2021},
  date      = {2021-01-01},
  journal   = {IEEE Transactions on Neural Networks and Learning Systems},
  pages     = {1--13},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article},
}
Wu, Haiwei; Zhou, Jiantao
IID-Net: Image Inpainting Detection Network via Neural Architecture Search and Attention Journal Article
In: IEEE Transactions on Circuits and Systems for Video Technology, pp. 1-1, 2021.
@article{9410590,
  title     = {IID-Net: Image Inpainting Detection Network via Neural Architecture Search and Attention},
  author    = {Haiwei Wu and Jiantao Zhou},
  url       = {https://ieeexplore.ieee.org/abstract/document/9410590},
  doi       = {10.1109/TCSVT.2021.3075039},
  year      = {2021},
  date      = {2021-01-01},
  journal   = {IEEE Transactions on Circuits and Systems for Video Technology},
  pages     = {1--1},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article},
}
Song, Seok Bin; Nam, Jung Woo; Kim, Jin Heon
NAS-PPG: PPG based heart rate estimation using Neural Architecture Search Journal Article
In: IEEE Sensors Journal, pp. 1-1, 2021.
@article{9402889,
  title     = {NAS-PPG: PPG based heart rate estimation using Neural Architecture Search},
  author    = {Seok Bin Song and Jung Woo Nam and Jin Heon Kim},
  url       = {https://ieeexplore.ieee.org/abstract/document/9402889},
  doi       = {10.1109/JSEN.2021.3073047},
  year      = {2021},
  date      = {2021-01-01},
  journal   = {IEEE Sensors Journal},
  pages     = {1--1},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article},
}
Bodin, Erik; Tomasi, Federico; Dai, Zhenwen
Making Differentiable Architecture Search less local Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2104-10450,
  title     = {Making Differentiable Architecture Search less local},
  author    = {Erik Bodin and Federico Tomasi and Zhenwen Dai},
  url       = {https://arxiv.org/abs/2104.10450},
  year      = {2021},
  date      = {2021-01-01},
  journal   = {CoRR},
  volume    = {abs/2104.10450},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {techreport},
}
Hong, Min-Fong; Chen, Hao-Yun; Chen, Min-Hung; Xu, Yu-Syuan; Kuo, Hsien-Kai; Tsai, Yi-Min; Chen, Hung-Jen; Jou, Kevin
Network Space Search for Pareto-Efficient Spaces Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2104-11014,
  title     = {Network Space Search for Pareto-Efficient Spaces},
  author    = {Min-Fong Hong and Hao-Yun Chen and Min-Hung Chen and Yu-Syuan Xu and Hsien-Kai Kuo and Yi-Min Tsai and Hung-Jen Chen and Kevin Jou},
  url       = {https://arxiv.org/abs/2104.11014},
  year      = {2021},
  date      = {2021-01-01},
  journal   = {CoRR},
  volume    = {abs/2104.11014},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {techreport},
}
Duggal, Rahul; Zhou, Hao; Yang, Shuo; Xiong, Yuanjun; Xia, Wei; Tu, Zhuowen; Soatto, Stefano
Compatibility-aware Heterogeneous Visual Search Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2105-06047,
  title     = {Compatibility-aware Heterogeneous Visual Search},
  author    = {Rahul Duggal and Hao Zhou and Shuo Yang and Yuanjun Xiong and Wei Xia and Zhuowen Tu and Stefano Soatto},
  url       = {https://arxiv.org/abs/2105.06047},
  year      = {2021},
  date      = {2021-01-01},
  journal   = {CoRR},
  volume    = {abs/2105.06047},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {techreport},
}
Di, Shimin; Yao, Quanming; Chen, Lei
Searching to Sparsify Tensor Decomposition for N-ary Relational Data Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2104-10625,
  title     = {Searching to Sparsify Tensor Decomposition for N-ary Relational Data},
  author    = {Shimin Di and Quanming Yao and Lei Chen},
  url       = {https://arxiv.org/abs/2104.10625},
  year      = {2021},
  date      = {2021-01-01},
  journal   = {CoRR},
  volume    = {abs/2104.10625},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {techreport},
}
Di, Shimin; Yao, Quanming; Zhang, Yongqi; Chen, Lei
Efficient Relation-aware Scoring Function Search for Knowledge Graph Embedding Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2104-10880,
  title     = {Efficient Relation-aware Scoring Function Search for Knowledge Graph Embedding},
  author    = {Shimin Di and Quanming Yao and Yongqi Zhang and Lei Chen},
  url       = {https://arxiv.org/abs/2104.10880},
  year      = {2021},
  date      = {2021-01-01},
  journal   = {CoRR},
  volume    = {abs/2104.10880},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {techreport},
}
Clemens, John
MLDS: A Dataset for Weight-Space Analysis of Neural Networks Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2104-10555,
  title     = {MLDS: A Dataset for Weight-Space Analysis of Neural Networks},
  author    = {John Clemens},
  url       = {https://arxiv.org/abs/2104.10555},
  year      = {2021},
  date      = {2021-01-01},
  journal   = {CoRR},
  volume    = {abs/2104.10555},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {techreport},
}
Pasini, Massimiliano Lupo; Yin, Junqi; Li, Ying Wai; Eisenbach, Markus
A scalable algorithm for the optimization of neural network architectures Journal Article
In: Parallel Computing, vol. 104-105, pp. 102788, 2021, ISSN: 0167-8191.
@article{LUPOPASINI2021102788,
  title     = {A scalable algorithm for the optimization of neural network architectures},
  author    = {Massimiliano Lupo Pasini and Junqi Yin and Ying Wai Li and Markus Eisenbach},
  url       = {https://www.sciencedirect.com/science/article/pii/S0167819121000430},
  doi       = {10.1016/j.parco.2021.102788},
  issn      = {0167-8191},
  year      = {2021},
  date      = {2021-01-01},
  journal   = {Parallel Computing},
  volume    = {104-105},
  pages     = {102788},
  abstract  = {We propose a new scalable method to optimize the architecture of an artificial neural network. The proposed algorithm, called Greedy Search for Neural Network Architecture, aims to determine a neural network with minimal number of layers that is at least as performant as neural networks of the same structure identified by other hyperparameter search algorithms in terms of accuracy and computational cost. Numerical results performed on benchmark datasets show that, for these datasets, our method outperforms state-of-the-art hyperparameter optimization algorithms in terms of attainable predictive performance by the selected neural network architecture, and time-to-solution for the hyperparameter optimization to complete.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article},
}
Yang, Zhao; Sun, Qingshuang
Efficient Resource-aware Neural Architecture Search with Dynamic Adaptive Network Sampling Proceedings Article
In: 2021 IEEE International Symposium on Circuits and Systems (ISCAS), pp. 1-5, 2021.
@inproceedings{9401187,
  title     = {Efficient Resource-aware Neural Architecture Search with Dynamic Adaptive Network Sampling},
  author    = {Zhao Yang and Qingshuang Sun},
  url       = {https://ieeexplore.ieee.org/abstract/document/9401187},
  doi       = {10.1109/ISCAS51556.2021.9401187},
  year      = {2021},
  date      = {2021-01-01},
  booktitle = {2021 IEEE International Symposium on Circuits and Systems (ISCAS)},
  pages     = {1--5},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings},
}
Amer, Mohammed; Maul, Tomás; Liao, Iman Yi
Balancing Accuracy and Latency in Multipath Neural Networks Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2104-12040,
  title     = {Balancing Accuracy and Latency in Multipath Neural Networks},
  author    = {Mohammed Amer and Tom{\'a}s Maul and Iman Yi Liao},
  url       = {https://arxiv.org/abs/2104.12040},
  year      = {2021},
  date      = {2021-01-01},
  journal   = {CoRR},
  volume    = {abs/2104.12040},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {techreport},
}
Lei, Zhou; Yang, Kangkang; Jiang, Kai; Chen, Shengbo
KDAS-ReID: Architecture Search for Person Re-Identification via Distilled Knowledge with Dynamic Temperature Journal Article
In: Algorithms, vol. 14, no. 5, 2021, ISSN: 1999-4893.
@article{a14050137,
  title     = {KDAS-ReID: Architecture Search for Person Re-Identification via Distilled Knowledge with Dynamic Temperature},
  author    = {Zhou Lei and Kangkang Yang and Kai Jiang and Shengbo Chen},
  url       = {https://www.mdpi.com/1999-4893/14/5/137},
  doi       = {10.3390/a14050137},
  issn      = {1999-4893},
  year      = {2021},
  date      = {2021-01-01},
  journal   = {Algorithms},
  volume    = {14},
  number    = {5},
  abstract  = {Person re-Identification(Re-ID) based on deep convolutional neural networks (CNNs) achieves remarkable success with its fast speed. However, prevailing Re-ID models are usually built upon backbones that manually design for classification. In order to automatically design an effective Re-ID architecture, we propose a pedestrian re-identification algorithm based on knowledge distillation, called KDAS-ReID. When the knowledge of the teacher model is transferred to the student model, the importance of knowledge in the teacher model will gradually decrease with the improvement of the performance of the student model. Therefore, instead of applying the distillation loss function directly, we consider using dynamic temperatures during the search stage and training stage. Specifically, we start searching and training at a high temperature and gradually reduce the temperature to 1 so that the student model can better learn from the teacher model through soft targets. Extensive experiments demonstrate that KDAS-ReID performs not only better than other state-of-the-art Re-ID models on three benchmarks, but also better than the teacher model based on the ResNet-50 backbone.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article},
}
Laube, Kevin Alexander; Zell, Andreas
Inter-choice dependent super-network weights Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2104-11522,
  title     = {Inter-choice dependent super-network weights},
  author    = {Kevin Alexander Laube and Andreas Zell},
  url       = {https://arxiv.org/abs/2104.11522},
  year      = {2021},
  date      = {2021-01-01},
  journal   = {CoRR},
  volume    = {abs/2104.11522},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {techreport},
}
Chen, Oscal Tzyh-Chiang; Zhang, Yu Cheng; Chang, Yu-Xuan; Chang, Yu-Lung
Iterative Multiple-Path One-Shot NAS for the Optimized Performance Proceedings Article
In: 2021 IEEE International Symposium on Circuits and Systems (ISCAS), pp. 1-5, 2021.
@inproceedings{9401130,
  title     = {Iterative Multiple-Path One-Shot NAS for the Optimized Performance},
  author    = {Oscal Tzyh-Chiang Chen and Yu Cheng Zhang and Yu-Xuan Chang and Yu-Lung Chang},
  url       = {https://ieeexplore.ieee.org/abstract/document/9401130},
  doi       = {10.1109/ISCAS51556.2021.9401130},
  year      = {2021},
  date      = {2021-01-01},
  booktitle = {2021 IEEE International Symposium on Circuits and Systems (ISCAS)},
  pages     = {1--5},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings},
}
Risso, Matteo; Burrello, Alessio; Pagliari, Daniele Jahier; Benatti, Simone; Macii, Enrico; Benini, Luca; Poncino, Massimo
Robust and Energy-efficient PPG-based Heart-Rate Monitoring Proceedings Article
In: 2021 IEEE International Symposium on Circuits and Systems (ISCAS), pp. 1-5, 2021.
@inproceedings{9401282,
  title     = {Robust and Energy-efficient PPG-based Heart-Rate Monitoring},
  author    = {Matteo Risso and Alessio Burrello and Daniele Jahier Pagliari and Simone Benatti and Enrico Macii and Luca Benini and Massimo Poncino},
  url       = {https://ieeexplore.ieee.org/abstract/document/9401282},
  doi       = {10.1109/ISCAS51556.2021.9401282},
  year      = {2021},
  date      = {2021-01-01},
  booktitle = {2021 IEEE International Symposium on Circuits and Systems (ISCAS)},
  pages     = {1--5},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings},
}
Byun, Younghoon; Lee, Youngjoo
Rapid Design Space Exploration of Near-Optimal Memory-Reduced DCNN Architecture using Multiple Model Compression Techniques Proceedings Article
In: 2021 IEEE International Symposium on Circuits and Systems (ISCAS), pp. 1-5, 2021.
@inproceedings{9401489,
  title     = {Rapid Design Space Exploration of Near-Optimal Memory-Reduced DCNN Architecture using Multiple Model Compression Techniques},
  author    = {Younghoon Byun and Youngjoo Lee},
  url       = {https://ieeexplore.ieee.org/abstract/document/9401489},
  doi       = {10.1109/ISCAS51556.2021.9401489},
  year      = {2021},
  date      = {2021-01-01},
  booktitle = {2021 IEEE International Symposium on Circuits and Systems (ISCAS)},
  pages     = {1--5},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings},
}
Oymak, Samet; Li, Mingchen; Soltanolkotabi, Mahdi
Generalization Guarantees for Neural Architecture Search with Train-Validation Split Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2104-14132,
  title     = {Generalization Guarantees for Neural Architecture Search with Train-Validation Split},
  author    = {Samet Oymak and Mingchen Li and Mahdi Soltanolkotabi},
  url       = {https://arxiv.org/abs/2104.14132},
  year      = {2021},
  date      = {2021-01-01},
  journal   = {CoRR},
  volume    = {abs/2104.14132},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {techreport},
}
Yan, Bin; Peng, Houwen; Wu, Kan; Wang, Dong; Fu, Jianlong; Lu, Huchuan
LightTrack: Finding Lightweight Neural Networks for Object Tracking via One-Shot Architecture Search Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2104-14545b,
  title     = {LightTrack: Finding Lightweight Neural Networks for Object Tracking via One-Shot Architecture Search},
  author    = {Bin Yan and Houwen Peng and Kan Wu and Dong Wang and Jianlong Fu and Huchuan Lu},
  url       = {https://arxiv.org/abs/2104.14545},
  year      = {2021},
  date      = {2021-01-01},
  journal   = {CoRR},
  volume    = {abs/2104.14545},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {techreport},
}
Yang, Hong; Zhang, Ya-sheng; Yin, Can-bin; Ding, Wen-zhe
Ultra-lightweight CNN design based on neural architecture search and knowledge distillation: A novel method to build the automatic recognition model of space target ISAR images Journal Article
In: Defence Technology, 2021, ISSN: 2214-9147.
@article{YANG2021,
  title     = {Ultra-lightweight CNN design based on neural architecture search and knowledge distillation: A novel method to build the automatic recognition model of space target ISAR images},
  author    = {Hong Yang and Ya-sheng Zhang and Can-bin Yin and Wen-zhe Ding},
  url       = {https://www.sciencedirect.com/science/article/pii/S2214914721000763},
  doi       = {10.1016/j.dt.2021.04.014},
  issn      = {2214-9147},
  year      = {2021},
  date      = {2021-01-01},
  journal   = {Defence Technology},
  abstract  = {In this paper, a novel method of ultra-lightweight convolution neural network (CNN) design based on neural architecture search (NAS) and knowledge distillation (KD) is proposed. It can realize the automatic construction of the space target inverse synthetic aperture radar (ISAR) image recognition model with ultra-lightweight and high accuracy. This method introduces the NAS method into the radar image recognition for the first time, which solves the time-consuming and labor-consuming problems in the artificial design of the space target ISAR image automatic recognition model (STIIARM). On this basis, the NAS model's knowledge is transferred to the student model with lower computational complexity by the flow of the solution procedure (FSP) distillation method. Thus, the decline of recognition accuracy caused by the direct compression of model structural parameters can be effectively avoided, and the ultra-lightweight STIIARM can be obtained. In the method, the Inverted Linear Bottleneck (ILB) and Inverted Residual Block (IRB) are firstly taken as each block's basic structure in CNN. And the expansion ratio, output filter size, number of IRBs, and convolution kernel size are set as the search parameters to construct a hierarchical decomposition search space. Then, the recognition accuracy and computational complexity are taken as the objective function and constraint conditions, respectively, and the global optimization model of the CNN architecture search is established. Next, the simulated annealing (SA) algorithm is used as the search strategy to search out the lightweight and high accuracy STIIARM directly. After that, based on the three principles of similar block structure, the same corresponding channel number, and the minimum computational complexity, the more lightweight student model is designed, and the FSP matrix pairing between the NAS model and student model is completed. Finally, by minimizing the loss between the FSP matrix pairs of the NAS model and student model, the student model's weight adjustment is completed. Thus the ultra-lightweight and high accuracy STIIARM is obtained. The proposed method's effectiveness is verified by the simulation experiments on the ISAR image dataset of five types of space targets.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article},
}
Zhou, Zheng; Li, Tianfu; Zhang, Zilong; Zhao, Zhibin; Sun, Chuang; Yan, Ruqiang; Chen, Xuefeng
Bayesian Differentiable Architecture Search for Efficient Domain Matching Fault Diagnosis Journal Article
In: IEEE Transactions on Instrumentation and Measurement, vol. 70, pp. 1-11, 2021.
@article{9419078,
  title     = {Bayesian Differentiable Architecture Search for Efficient Domain Matching Fault Diagnosis},
  author    = {Zheng Zhou and Tianfu Li and Zilong Zhang and Zhibin Zhao and Chuang Sun and Ruqiang Yan and Xuefeng Chen},
  url       = {https://ieeexplore.ieee.org/abstract/document/9419078},
  doi       = {10.1109/TIM.2021.3076575},
  year      = {2021},
  date      = {2021-01-01},
  journal   = {IEEE Transactions on Instrumentation and Measurement},
  volume    = {70},
  pages     = {1--11},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article},
}
Zhang, Lingling; Wang, Shaowei; Chang, Xiaojun; Liu, Jun; Ge, Zongyuan; Zheng, Qinghua
Auto-FSL: Searching the Attribute Consistent Network for Few-Shot Learning Journal Article
In: IEEE Transactions on Circuits and Systems for Video Technology, pp. 1-1, 2021.
@article{9419063,
  title     = {Auto-FSL: Searching the Attribute Consistent Network for Few-Shot Learning},
  author    = {Lingling Zhang and Shaowei Wang and Xiaojun Chang and Jun Liu and Zongyuan Ge and Qinghua Zheng},
  url       = {https://ieeexplore.ieee.org/abstract/document/9419063},
  doi       = {10.1109/TCSVT.2021.3076523},
  year      = {2021},
  date      = {2021-01-01},
  journal   = {IEEE Transactions on Circuits and Systems for Video Technology},
  pages     = {1--1},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article},
}
Bruggemann, David; Kanakis, Menelaos; Obukhov, Anton; Georgoulis, Stamatios; Gool, Luc Van
Exploring Relational Context for Multi-Task Dense Prediction Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2104-13874,
  title     = {Exploring Relational Context for Multi-Task Dense Prediction},
  author    = {David Bruggemann and Menelaos Kanakis and Anton Obukhov and Stamatios Georgoulis and Luc {Van Gool}},
  url       = {https://arxiv.org/abs/2104.13874},
  year      = {2021},
  date      = {2021-01-01},
  journal   = {CoRR},
  volume    = {abs/2104.13874},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {techreport},
}
Valsesia, Diego; Fracastoro, Giulia; Magli, Enrico
Don’t Stack Layers In Graph Neural Networks, Wire Them Randomly Proceedings Article
In: ICLR 2021 Workshop on Geometrical and Topological Representation Learning, 2021.
@inproceedings{Valsesia2021,
  title     = {Don’t Stack Layers In Graph Neural Networks, Wire Them Randomly},
  author    = {Valsesia, Diego and Fracastoro, Giulia and Magli, Enrico},
  url       = {https://openreview.net/pdf/224ad9f05a62ed1b5c7a1a23386bdcd29f717b8a.pdf},
  year      = {2021},
  date      = {2021-01-01},
  booktitle = {ICLR 2021 Workshop on Geometrical and Topological Representation Learning},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings},
}
Guerrero-Viu, Julia; Hauns, Sven; Izquierdo, Sergio; Miotto, Guilherme; Schrodi, Simon; Biedenkapp, Andre; Elsken, Thomas; Deng, Difan; Lindauer, Marius; Hutter, Frank
Bag of Baselines for Multi-objective Joint Neural Architecture Search and Hyperparameter Optimization Proceedings Article
In: 8th ICML Workshop on Automated Machine Learning, 2021.
@inproceedings{DBLP:journals/corr/abs-2105-01015,
  title     = {Bag of Baselines for Multi-objective Joint Neural Architecture Search and Hyperparameter Optimization},
  author    = {Julia Guerrero-Viu and Sven Hauns and Sergio Izquierdo and Guilherme Miotto and Simon Schrodi and Andre Biedenkapp and Thomas Elsken and Difan Deng and Marius Lindauer and Frank Hutter},
  url       = {https://arxiv.org/abs/2105.01015},
  year      = {2021},
  date      = {2021-01-01},
  booktitle = {8th ICML Workshop on Automated Machine Learning},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings},
}
Lv, Jindi; Ye, Qing; Sun, Yanan; Zhao, Juan; Lv, Jiancheng
Heart-Darts: Classification of Heartbeats Using Differentiable Architecture Search Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2105-00693,
  title     = {Heart-Darts: Classification of Heartbeats Using Differentiable Architecture Search},
  author    = {Jindi Lv and Qing Ye and Yanan Sun and Juan Zhao and Jiancheng Lv},
  url       = {https://arxiv.org/abs/2105.00693},
  year      = {2021},
  date      = {2021-01-01},
  journal   = {CoRR},
  volume    = {abs/2105.00693},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {techreport},
}
Mo, Hyunho; Custode, Leonardo Lucio; Iacca, Giovanni
Evolutionary neural architecture search for remaining useful life prediction Journal Article
In: Applied Soft Computing, vol. 108, pp. 107474, 2021, ISSN: 1568-4946.
@article{MO2021107474,
  title     = {Evolutionary neural architecture search for remaining useful life prediction},
  author    = {Hyunho Mo and Leonardo Lucio Custode and Giovanni Iacca},
  url       = {https://www.sciencedirect.com/science/article/pii/S1568494621003975},
  doi       = {10.1016/j.asoc.2021.107474},
  issn      = {1568-4946},
  year      = {2021},
  date      = {2021-01-01},
  journal   = {Applied Soft Computing},
  volume    = {108},
  pages     = {107474},
  abstract  = {With the advent of Industry 4.0, making accurate predictions of the remaining useful life (RUL) of industrial components has become a crucial aspect in predictive maintenance (PdM). To this aim, various Deep Neural Network (DNN) models have been proposed in the recent literature. However, while the architectures of these models have a large impact on their performance, they are usually determined empirically. To exclude the time-consuming process and the unnecessary computational cost of manually engineering these models, we present a Neural Architecture Search (NAS) technique based on an Evolutionary Algorithm (EA) applied to optimize the architecture of a DNN used to predict the RUL. The EA explores the combinatorial parameter space of a multi-head Convolutional Neural Network with Long Short Term Memory (CNN-LSTM) to search for the best architecture. In particular, our method requires minimum computational resources by making use of an early stopping policy and a history of the evaluated architectures. We dub the proposed method ENAS-PdM. To our knowledge, this is the first work where an EA-based NAS is used to optimize a CNN-LSTM architecture in the field of PdM. In our experiments, we use the well-established Commercial Modular Aero-Propulsion System Simulation (C-MAPSS) dataset from NASA. Compared to the current state-of-the-art, our method obtains better results in terms of two different metrics, RMSE and Score, when aggregating across all the C-MAPSS sub-datasets. Without aggregation, we achieve lower RMSE in 3 out of 4 sub-datasets. Our experimental results verify that the proposed method is a reliable tool for obtaining state-of-the-art RUL predictions and as such it can have a strong impact in several industrial applications, especially those with limited available computing power.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article},
}
Chen, Haoze; Zhang, Zhijie; Zhao, Chenyang; Liu, Jiaqi; Yin, Wuliang; Li, Yanfeng; Wang, Fengxiang; Li, Chao; Lin, Zhenyu
Depth Classification of Defects Based on Neural Architecture Search Journal Article
In: IEEE Access, vol. 9, pp. 73424-73432, 2021.
@article{9424564,
title = {Depth Classification of Defects Based on Neural Architecture Search},
author = {Haoze Chen and Zhijie Zhang and Chenyang Zhao and Jiaqi Liu and Wuliang Yin and Yanfeng Li and Fengxiang Wang and Chao Li and Zhenyu Lin},
url = {https://ieeexplore.ieee.org/abstract/document/9424564},
doi = {10.1109/ACCESS.2021.3077961},
year = {2021},
date = {2021-01-01},
journal = {IEEE Access},
volume = {9},
pages = {73424--73432},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Wei, Xun; Luo, Wang; Zhang, Xixi; Yang, Jie; Gui, Guan; Ohtsuki, Tomoaki
Differentiable Architecture Search-Based Automatic Modulation Classification Proceedings Article
In: 2021 IEEE Wireless Communications and Networking Conference (WCNC), pp. 1-6, 2021.
@inproceedings{9417449,
title = {Differentiable Architecture Search-Based Automatic Modulation Classification},
author = {Xun Wei and Wang Luo and Xixi Zhang and Jie Yang and Guan Gui and Tomoaki Ohtsuki},
url = {https://ieeexplore.ieee.org/abstract/document/9417449},
doi = {10.1109/WCNC49053.2021.9417449},
year = {2021},
date = {2021-01-01},
booktitle = {2021 IEEE Wireless Communications and Networking Conference (WCNC)},
pages = {1--6},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Chen, Shumei; Yu, Jianbo; Wang, Shijin
One-dimensional convolutional neural network-based active feature extraction for fault detection and diagnosis of industrial processes and its understanding via visualization Journal Article
In: ISA Transactions, 2021, ISSN: 0019-0578.
@article{CHEN2021,
title = {One-dimensional convolutional neural network-based active feature extraction for fault detection and diagnosis of industrial processes and its understanding via visualization},
author = {Shumei Chen and Jianbo Yu and Shijin Wang},
url = {https://www.sciencedirect.com/science/article/pii/S0019057821002391},
doi = {10.1016/j.isatra.2021.04.042},
issn = {0019-0578},
year = {2021},
date = {2021-01-01},
journal = {ISA Transactions},
abstract = {Feature extraction from process signals enables process monitoring models to be effective in industrial processes. Deep learning presents extensive possibilities for extracting abstract features from image and visual data. However, the main inputs of conventional deep neural networks are large images. To overcome this, a one-dimension convolution neural network-based model optimized by a reinforcement-learning-based neural architecture search, is proposed for multivariate processes control. The experimental results illustrate its predominance for detecting and recognizing process faults. Feature and network visualization are also implemented to explore the reasons for its outstanding performance. This research extends the applications of convolutional neural network based on one-dimension process signals in complex multivariate process control.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Liu, Mengyu; Yin, Hujun
Efficient pyramid context encoding and feature embedding for semantic segmentation Journal Article
In: Image and Vision Computing, vol. 111, pp. 104195, 2021, ISSN: 0262-8856.
@article{LIU2021104195,
title = {Efficient pyramid context encoding and feature embedding for semantic segmentation},
author = {Mengyu Liu and Hujun Yin},
url = {https://www.sciencedirect.com/science/article/pii/S0262885621001001},
doi = {10.1016/j.imavis.2021.104195},
issn = {0262-8856},
year = {2021},
date = {2021-01-01},
journal = {Image and Vision Computing},
volume = {111},
pages = {104195},
abstract = {For reality applications of semantic segmentation, inference speed and memory usage are two important factors. To address these challenges, we propose a lightweight feature pyramid encoding network (FPENet) for semantic segmentation with a good trade-off between accuracy and speed. We use a series of feature pyramid encoding (FPE) blocks to encode context at multiple scales in the encoder. Each FPE block consists of different depthwise dilated convolutions that perform as a spatial pyramid to extract features and reduce computational costs. During training, a one-shot neural architecture search algorithm is adopted to find the optimal structure for each FPE block from a large search space with a small search cost. After the search for the encoder, a mutual embedding upsample module is introduced in the decoder, consisting of two attention blocks. The encoder-decoder attention mechanism is used to help aggregate efficiently high-level semantic features and low-level spatial details. The proposed network outperforms the existing real-time methods with fewer parameters and improved inference speed on the Cityscapes and CamVid benchmark datasets. Specifically, it achieved 72.3% mean IoU on the Cityscapes test set with only 0.4 M parameters and 192.6 FPS speed on an Nvidia Titan V100 GPU, and 73.4% mean IoU with 116.2 FPS when running on higher resolution images.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Gao, Yanjie; Zhu, Yonghao; Zhang, Hongyu; Lin, Haoxiang; Yang, Mao
Resource-Guided Configuration Space Reduction for Deep Learning Models Proceedings Article
In: 2021 IEEE/ACM 43rd International Conference on Software Engineering (ICSE), pp. 175-187, 2021.
@inproceedings{9402095,
title = {Resource-Guided Configuration Space Reduction for Deep Learning Models},
author = {Yanjie Gao and Yonghao Zhu and Hongyu Zhang and Haoxiang Lin and Mao Yang},
url = {https://ieeexplore.ieee.org/abstract/document/9402095},
doi = {10.1109/ICSE43902.2021.00028},
year = {2021},
date = {2021-01-01},
booktitle = {2021 IEEE/ACM 43rd International Conference on Software Engineering (ICSE)},
pages = {175--187},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Fei, Ke; Li, Qi; Cui, Can; Chen, Xue; Xu, Xinxin; Xue, Benshan; Cai, Weifeng
Nontechnical Loss Detection using Neural Architecture Search and Outlier Detection Journal Article
In: E3S Web Conf., vol. 256, pp. 01025, 2021.
@article{refId0,
title = {Nontechnical Loss Detection using Neural Architecture Search and Outlier Detection},
author = {Ke Fei and Qi Li and Can Cui and Xue Chen and Xinxin Xu and Benshan Xue and Weifeng Cai},
url = {https://doi.org/10.1051/e3sconf/202125601025},
doi = {10.1051/e3sconf/202125601025},
year = {2021},
date = {2021-01-01},
journal = {E3S Web Conf.},
volume = {256},
pages = {01025},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Wang, Xiaofang; Cao, Shengcao; Li, Mengtian; Kitani, Kris M
Neighborhood-Aware Neural Architecture Search Technical Report
2021.
@techreport{DBLP:journals/corr/abs-2105-06369,
author = {Xiaofang Wang and Shengcao Cao and Mengtian Li and Kris M Kitani},
title = {Neighborhood-Aware Neural Architecture Search},
year = {2021},
date = {2021-01-01},
journal = {CoRR},
volume = {abs/2105.06369},
url = {https://arxiv.org/abs/2105.06369},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}