Maintained by Difan Deng and Marius Lindauer.
The following list considers papers related to neural architecture search. It is by no means complete. If you miss a paper on the list, please let us know.
Please note that although NAS methods steadily improve, the quality of empirical evaluations in this field is still lagging behind compared to other areas in machine learning, AI and optimization. We would therefore like to share some best practices for empirical evaluations of NAS methods, which we believe will facilitate sustained and measurable progress in the field. If you are interested in a teaser, please read our blog post or directly jump to our checklist.
Transformers have gained increasing popularity in different domains. For a comprehensive list of papers focusing on Neural Architecture Search for Transformer-Based spaces, the awesome-transformer-search repo is all you need.
2022
Wei, Lanning; He, Zhiqiang; Zhao, Huan; Yao, Quanming
Enhancing Intra-class Information Extraction for Heterophilous Graphs: One Neural Architecture Search Approach Technical Report
2022.
@techreport{DBLP:journals/corr/abs-2211-10990,
title = {Enhancing Intra-class Information Extraction for Heterophilous Graphs: One Neural Architecture Search Approach},
author = {Lanning Wei and Zhiqiang He and Huan Zhao and Quanming Yao},
url = {https://doi.org/10.48550/arXiv.2211.10990},
doi = {10.48550/arXiv.2211.10990},
institution = {arXiv},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {CoRR},
volume = {abs/2211.10990},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Shahawy, Mohamed; Benkhelifa, Elhadj
HiveNAS: Neural Architecture Search using Artificial Bee Colony Optimization Technical Report
2022.
@techreport{DBLP:journals/corr/abs-2211-10250,
title = {{HiveNAS}: Neural Architecture Search using Artificial Bee Colony Optimization},
author = {Mohamed Shahawy and Elhadj Benkhelifa},
url = {https://doi.org/10.48550/arXiv.2211.10250},
doi = {10.48550/arXiv.2211.10250},
institution = {arXiv},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {CoRR},
volume = {abs/2211.10250},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Wang, Lanfei; Xie, Lingxi; Bi, Kaifeng; Zhao, Kaili; Guo, Jun; Tian, Qi
M2NAS: Joint Neural Architecture Optimization System with Network Transmission Journal Article
In: IEEE Transactions on Computer-Aided Design of Integrated Circuits and Systems, pp. 1-1, 2022.
@article{9956879,
title = {{M2NAS}: Joint Neural Architecture Optimization System with Network Transmission},
author = {Lanfei Wang and Lingxi Xie and Kaifeng Bi and Kaili Zhao and Jun Guo and Qi Tian},
url = {https://ieeexplore.ieee.org/abstract/document/9956879},
doi = {10.1109/TCAD.2022.3223852},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {IEEE Transactions on Computer-Aided Design of Integrated Circuits and Systems},
pages = {1--1},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Guo, Bicheng; Guo, Shuxuan; Shi, Miaojing; Cheng, Peng; He, Shibo; Chen, Jiming; Yu, Kaicheng
(alpha) DARTS Once More: Enhancing Differentiable Architecture Search by Masked Image Modeling Technical Report
2022.
@techreport{DBLP:journals/corr/abs-2211-10105,
title = {(alpha) {DARTS} Once More: Enhancing Differentiable Architecture Search by Masked Image Modeling},
author = {Bicheng Guo and Shuxuan Guo and Miaojing Shi and Peng Cheng and Shibo He and Jiming Chen and Kaicheng Yu},
url = {https://doi.org/10.48550/arXiv.2211.10105},
doi = {10.48550/arXiv.2211.10105},
institution = {arXiv},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {CoRR},
volume = {abs/2211.10105},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
He, Xin; Yao, Jiangchao; Wang, Yuxin; Tang, Zhenheng; Cheung, Ka Chun; See, Simon; Han, Bo; Chu, Xiaowen
NAS-LID: Efficient Neural Architecture Search with Local Intrinsic Dimension Technical Report
2022.
@techreport{DBLP:journals/corr/abs-2211-12759,
title = {{NAS-LID}: Efficient Neural Architecture Search with Local Intrinsic Dimension},
author = {Xin He and Jiangchao Yao and Yuxin Wang and Zhenheng Tang and Ka Chun Cheung and Simon See and Bo Han and Xiaowen Chu},
url = {https://doi.org/10.48550/arXiv.2211.12759},
doi = {10.48550/arXiv.2211.12759},
institution = {arXiv},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {CoRR},
volume = {abs/2211.12759},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Hakim, Tal
Accuracy Prediction for NAS Acceleration using Feature Selection and Extrapolation Technical Report
2022.
@techreport{DBLP:journals/corr/abs-2211-12419,
title = {Accuracy Prediction for {NAS} Acceleration using Feature Selection and Extrapolation},
author = {Tal Hakim},
url = {https://doi.org/10.48550/arXiv.2211.12419},
doi = {10.48550/arXiv.2211.12419},
institution = {arXiv},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {CoRR},
volume = {abs/2211.12419},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Milardo, Sebastiano; Rathore, Punit; Santi, Paolo; Ratti, Carlo
A Data-Driven Framework for Driving Style Classification Proceedings Article
In: Chen, Weitong; Yao, Lina; Cai, Taotao; Pan, Shirui; Shen, Tao; Li, Xue (Ed.): Advanced Data Mining and Applications, pp. 253–265, Springer Nature Switzerland, Cham, 2022, ISBN: 978-3-031-22137-8.
@inproceedings{10.1007/978-3-031-22137-8_19,
title = {A Data-Driven Framework for Driving Style Classification},
author = {Sebastiano Milardo and Punit Rathore and Paolo Santi and Carlo Ratti},
editor = {Weitong Chen and Lina Yao and Taotao Cai and Shirui Pan and Tao Shen and Xue Li},
url = {https://link.springer.com/chapter/10.1007/978-3-031-22137-8_19},
isbn = {978-3-031-22137-8},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {Advanced Data Mining and Applications},
pages = {253--265},
publisher = {Springer Nature Switzerland},
address = {Cham},
abstract = {Traditional driving behaviour recognition algorithms leverage hand-crafted features extracted from raw driving data and then apply user-defined machine learning models to identify driving behaviours. However, such solutions are limited by the set of selected features and by the chosen model. In this work, we present a data-driven driving behaviour recognition framework that utilizes an unsupervised feature extraction and feature selection algorithm and a deep neural network architecture obtained using an Automated Machine Learning (AutoML) approach. To validate the feasibility of this solution, numerical evaluations were performed on a unique real-world driving datasets collected from 29 professional truck drivers in uncontrolled environments, including supervisor's scoring of driver behavior that is used as ground truth data. Our experimental results show that the proposed deep neural network model achieves up to 95\% accuracy for multi-class classification, significantly outperforming five other popular machine learning models.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Shen, Jinbo; Luo, Mengting; Liu, Han; Liao, Peixi; Chen, Hu; Zhang, Yi
MLF-IOSC: Multi-Level Fusion Network with Independent Operation Search Cell for Low-Dose CT Denoising Journal Article
In: IEEE Transactions on Medical Imaging, pp. 1-1, 2022.
@article{9963565,
title = {{MLF-IOSC}: Multi-Level Fusion Network with Independent Operation Search Cell for Low-Dose {CT} Denoising},
author = {Jinbo Shen and Mengting Luo and Han Liu and Peixi Liao and Hu Chen and Yi Zhang},
url = {https://ieeexplore.ieee.org/abstract/document/9963565},
doi = {10.1109/TMI.2022.3224396},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {IEEE Transactions on Medical Imaging},
pages = {1--1},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Chen, Oscal Tzyh-Chiang; Chang, Yu-Xuan; Jhao, Yu-Wei; Chung, Chih-Yu; Chang, Yun-Ling; Huang, Wei-Hsiang
3D Object Detection of Cars and Pedestrians by Deep Neural Networks from Unit-Sharing One-Shot NAS Proceedings Article
In: 2022 18th IEEE International Conference on Advanced Video and Signal Based Surveillance (AVSS), pp. 1-8, 2022.
@inproceedings{9959427,
title = {{3D} Object Detection of Cars and Pedestrians by Deep Neural Networks from Unit-Sharing One-Shot {NAS}},
author = {Oscal Tzyh-Chiang Chen and Yu-Xuan Chang and Yu-Wei Jhao and Chih-Yu Chung and Yun-Ling Chang and Wei-Hsiang Huang},
url = {https://ieeexplore.ieee.org/abstract/document/9959427},
doi = {10.1109/AVSS56176.2022.9959427},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {2022 18th IEEE International Conference on Advanced Video and Signal Based Surveillance (AVSS)},
pages = {1--8},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Prabhakar, Swaroop N.; Deshwal, Ankur; Mishra, Rahul; Kim, Hyeonsu
DistilNAS: Neural Architecture Search With Distilled Data Journal Article
In: IEEE Access, vol. 10, pp. 124990-124998, 2022.
@article{9963961,
title = {{DistilNAS}: Neural Architecture Search With Distilled Data},
author = {Swaroop N. Prabhakar and Ankur Deshwal and Rahul Mishra and Hyeonsu Kim},
url = {https://ieeexplore.ieee.org/document/9963961},
doi = {10.1109/ACCESS.2022.3224788},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {IEEE Access},
volume = {10},
pages = {124990--124998},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Gurlahosur, Sunil V.; Meena, S. M.; Kulkarni, Uday; Dcosta, Winston; Lokur, Vineet; Sirigeri, Rohan V.; Porwal, Sajal; Sammed, S. P.; Mudenagudi, Uma
Comparative Analysis of Neural Architecture Search Methods for Classification of Cultural Heritage Sites Proceedings Article
In: Mudenagudi, Uma; Nigam, Aditya; Sarvadevabhatla, Ravi Kiran; Choudhary, Ayesha (Ed.): Proceedings of the Satellite Workshops of ICVGIP 2021, pp. 25–41, Springer Nature Singapore, Singapore, 2022, ISBN: 978-981-19-4136-8.
@inproceedings{10.1007/978-981-19-4136-8_2,
title = {Comparative Analysis of Neural Architecture Search Methods for Classification of Cultural Heritage Sites},
author = {Sunil V. Gurlahosur and S. M. Meena and Uday Kulkarni and Winston Dcosta and Vineet Lokur and Rohan V. Sirigeri and Sajal Porwal and S. P. Sammed and Uma Mudenagudi},
editor = {Uma Mudenagudi and Aditya Nigam and Ravi Kiran Sarvadevabhatla and Ayesha Choudhary},
url = {https://link.springer.com/chapter/10.1007/978-981-19-4136-8_2},
isbn = {978-981-19-4136-8},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {Proceedings of the Satellite Workshops of {ICVGIP} 2021},
pages = {25--41},
publisher = {Springer Nature Singapore},
address = {Singapore},
abstract = {In the current era of Machine Learning, the performance of Neural Networks in object detection, image classification, and video analytics has improved with better design of architecture. It often requires engineers to spend substantial time and effort to design the network, which can be an error-prone method. This method does not exhaustively search the entire search space of possible neural network architecture and guarantees optimal accuracy from the designed model. Neural Architecture Search (NAS) automates this process to find the optimal network to outperform the hand-designed model. Though NAS methods have shown promising performance for image classification tasks, it is challenging to infer why they work well on standard data sets and perform poorly when transferring the same NAS method to custom/real-time data sets. This paper proposes a custom image data set based on Indian Heritage sites built using a crowdsourced framework to perform a comparative performance analysis of NAS methods for the image classification task. The data set consists of 20,000 color images of 1920*1080 pixels from 40 heritage sites, with 16,000 training and 4000 test images. The comparative study is performed on three primary NAS methods viz. Efficient Neural Architecture Search via parameter sharing (ENAS), Differentiable Architecture Search (DARTS), and Neural Architecture Search using Multi-Objective Genetic Algorithm (NSGA-Net). The DARTS showed 88.625% accuracy, ENAS showed 32.83% accuracy and NSGA-Net produced 69.92% accuracy on the custom data set.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Li, Muchen; Liu, Jeffrey Yunfan; Sigal, Leonid; Liao, Renjie
GraphPNAS: Learning Distribution of Good Neural Architectures via Deep Graph Generative Models Technical Report
2022.
@techreport{https://doi.org/10.48550/arxiv.2211.15155,
title = {{GraphPNAS}: Learning Distribution of Good Neural Architectures via Deep Graph Generative Models},
author = {Muchen Li and Jeffrey Yunfan Liu and Leonid Sigal and Renjie Liao},
url = {https://arxiv.org/abs/2211.15155},
doi = {10.48550/ARXIV.2211.15155},
institution = {arXiv},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
publisher = {arXiv},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Zeng, Wenxuan; Li, Meng; Xiong, Wenjie; Lu, Wenjie; Tan, Jin; Wang, Runsheng; Huang, Ru
MPCViT: Searching for MPC-friendly Vision Transformer with Heterogeneous Attention Technical Report
2022.
@techreport{DBLP:journals/corr/abs-2211-13955,
title = {{MPCViT}: Searching for {MPC-friendly} Vision Transformer with Heterogeneous Attention},
author = {Wenxuan Zeng and Meng Li and Wenjie Xiong and Wenjie Lu and Jin Tan and Runsheng Wang and Ru Huang},
url = {https://doi.org/10.48550/arXiv.2211.13955},
doi = {10.48550/arXiv.2211.13955},
institution = {arXiv},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {CoRR},
volume = {abs/2211.13955},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Xu, Xianzhe; Jiang, Yiqi; Chen, Weihua; Huang, Yilun; Zhang, Yuan; Sun, Xiuyu
DAMO-YOLO : A Report on Real-Time Object Detection Design Technical Report
2022.
@techreport{DBLP:journals/corr/abs-2211-15444,
title = {{DAMO-YOLO}: A Report on Real-Time Object Detection Design},
author = {Xianzhe Xu and Yiqi Jiang and Weihua Chen and Yilun Huang and Yuan Zhang and Xiuyu Sun},
url = {https://doi.org/10.48550/arXiv.2211.15444},
doi = {10.48550/arXiv.2211.15444},
institution = {arXiv},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {CoRR},
volume = {abs/2211.15444},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Shuai, Zhenhao; Liu, Hongbo; Wan, Zhaolin; Yu, Wei-Jie; Zhang, Jun
A Self-adaptive Neuroevolution Approach to Constructing Deep Neural Network Architectures Across Different Types Technical Report
2022.
@techreport{DBLP:journals/corr/abs-2211-14753,
title = {A Self-adaptive Neuroevolution Approach to Constructing Deep Neural Network Architectures Across Different Types},
author = {Zhenhao Shuai and Hongbo Liu and Zhaolin Wan and Wei-Jie Yu and Jun Zhang},
url = {https://doi.org/10.48550/arXiv.2211.14753},
doi = {10.48550/arXiv.2211.14753},
institution = {arXiv},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {CoRR},
volume = {abs/2211.14753},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Vo, An; Pham, Tan Ngoc; Nguyen, Van Bich; Luong, Ngoc Hoang
Training-Free Multi-Objective and Many-Objective Evolutionary Neural Architecture Search with Synaptic Flow Proceedings Article
In: The 11th International Symposium on Information and Communication Technology, pp. 1–8, Association for Computing Machinery, Hanoi, Vietnam, 2022, ISBN: 9781450397254.
@inproceedings{10.1145/3568562.3568569,
title = {Training-Free Multi-Objective and Many-Objective Evolutionary Neural Architecture Search with Synaptic Flow},
author = {An Vo and Tan Ngoc Pham and Van Bich Nguyen and Ngoc Hoang Luong},
url = {https://doi.org/10.1145/3568562.3568569},
doi = {10.1145/3568562.3568569},
isbn = {9781450397254},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {The 11th International Symposium on Information and Communication Technology},
pages = {1--8},
publisher = {Association for Computing Machinery},
address = {Hanoi, Vietnam},
series = {SoICT 2022},
abstract = {Neural architecture search (NAS) algorithms often suffer from the expensive computation cost because a sufficient number of candidate architectures need to be evaluated during the search. Each architecture evaluation involves hundreds of training epochs to obtain proper weights for computing the accuracy of that architecture. Recently, a training-free performance metric Synaptic Flow has been proposed to facilitate these architecture evaluations. Synaptic Flow can be computed using randomly initialized network weights and its values are found to have certain correlation degree with network test accuracy. Furthermore, in real-world neural architecture designing, network performance (e.g., test accuracy) is not the sole objective, and network complexity metrics (e.g., the number of parameters, latency) are also considered. In this paper, we investigate several multi-objective NAS problem formulations, where each involves one performance metric and one complexity metric, and a many-objective NAS problem formulation, that involves one performance metric and four complexity metrics. We consider two variants of the performance metric for each formulation: a training-based variant that employs network accuracy and a training-free variant that employs Synaptic Flow. We use the non-dominated sorting genetic algorithm II to solve these NAS problem formulations, and then compare the quality of the obtained architectures and the efficiency of solving each formulation. Experimental results on standard benchmark NATS-Bench exhibit the advantages of the training-free many-objective evolutionary NAS (TF-MaOENAS) approach in obtaining competitive architectures with reasonable computing cost. The code is available at: https://github.com/ELO-Lab/TF-MaOENAS.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Wu, Xinle; Zhang, Dalin; Zhang, Miao; Guo, Chenjuan; Yang, Bin; Jensen, Christian S.
Joint Neural Architecture and Hyperparameter Search for Correlated Time Series Forecasting Technical Report
2022.
@techreport{DBLP:journals/corr/abs-2211-16126,
title = {Joint Neural Architecture and Hyperparameter Search for Correlated Time Series Forecasting},
author = {Xinle Wu and Dalin Zhang and Miao Zhang and Chenjuan Guo and Bin Yang and Christian S. Jensen},
url = {https://doi.org/10.48550/arXiv.2211.16126},
doi = {10.48550/arXiv.2211.16126},
institution = {arXiv},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {CoRR},
volume = {abs/2211.16126},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Mills, Keith G.; Han, Fred X.; Zhang, Jialin; Chudak, Fabian; Mamaghani, Ali Safari; Salameh, Mohammad; Lu, Wei; Jui, Shangling; Niu, Di
GENNAPE: Towards Generalized Neural Architecture Performance Estimators Technical Report
2022.
@techreport{DBLP:journals/corr/abs-2211-17226,
title = {{GENNAPE}: Towards Generalized Neural Architecture Performance Estimators},
author = {Keith G. Mills and Fred X. Han and Jialin Zhang and Fabian Chudak and Ali Safari Mamaghani and Mohammad Salameh and Wei Lu and Shangling Jui and Di Niu},
url = {https://doi.org/10.48550/arXiv.2211.17226},
doi = {10.48550/arXiv.2211.17226},
institution = {arXiv},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {CoRR},
volume = {abs/2211.17226},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Zhang, Tunhou; Ma, Mingyuan; Yan, Feng; Li, Hai; Chen, Yiran
PIDS: Joint Point Interaction-Dimension Search for 3D Point Cloud Technical Report
2022.
@techreport{DBLP:journals/corr/abs-2211-15759,
title = {{PIDS}: Joint Point Interaction-Dimension Search for {3D} Point Cloud},
author = {Tunhou Zhang and Mingyuan Ma and Feng Yan and Hai Li and Yiran Chen},
url = {https://doi.org/10.48550/arXiv.2211.15759},
doi = {10.48550/arXiv.2211.15759},
institution = {arXiv},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {CoRR},
volume = {abs/2211.15759},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Li, Yufei; Bao, Liang; Huang, Kaipeng; Wu, Chase; Li, Xinwei
RSFIN: A Rule Search-based Fuzzy Inference Network for Performance Prediction of Configurable Software Systems Technical Report
2022.
@techreport{PPR:PPR577987,
title = {{RSFIN}: A Rule Search-based Fuzzy Inference Network for Performance Prediction of Configurable Software Systems},
author = {Yufei Li and Liang Bao and Kaipeng Huang and Chase Wu and Xinwei Li},
url = {https://doi.org/10.21203/rs.3.rs-2315849/v1},
doi = {10.21203/rs.3.rs-2315849/v1},
institution = {Research Square},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
publisher = {Research Square},
abstract = {Many modern software systems provide numerous configuration options to users and different configurations often lead to different performances. Due to the complex impact of a configuration on the system performance, users have to experimentally evaluate the performance for different configurations. However, it is practically infeasible to exhaust the almost infinite configuration space. To address this issue, various approaches have been proposed for performance prediction based on a limited number of configurations and corresponding performance measurements. Many of such efforts attempt to achieve a reasonable trade-off between experiment effort and prediction accuracy. In this paper, we propose a novel performance prediction model using a Rule Search-based Fuzzy Inference Network (RSFIN) based on ANFIS and NAS. One cognitive pattern is that, in systems, similar configurations produce similar performance. We experimentally validate this pattern based on data and introduce a configuration space under entropy. This view suggests the use of RSFIN to capture hidden distributions in configuration space. We implement and evaluate RSFIN using eleven real-world configurable software systems. Experimental results show that RSFIN achieves a better trade-off between measurement effort and prediction accuracy compared to other algorithms. In addition, the results also confirm that the evaluation of configuration space complexity based on data entropy is beneficial.},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Liu, Zhipu; Zhang, Lei; Zhang, David
Neural Image Parts Group Search for Person Re-identification Journal Article
In: IEEE Transactions on Circuits and Systems for Video Technology, pp. 1-1, 2022.
@article{9965432,
title = {Neural Image Parts Group Search for Person Re-identification},
author = {Zhipu Liu and Lei Zhang and David Zhang},
doi = {10.1109/TCSVT.2022.3225285},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {IEEE Transactions on Circuits and Systems for Video Technology},
pages = {1--1},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Huang, Junhao; Xue, Bing; Sun, Yanan; Zhang, Mengjie
EDE-NAS: An Eclectic Differential Evolution Approach to Single-Path Neural Architecture Search Proceedings Article
In: Aziz, Haris; Corrêa, Débora; French, Tim (Ed.): AI 2022: Advances in Artificial Intelligence, pp. 116–130, Springer International Publishing, Cham, 2022, ISBN: 978-3-031-22695-3.
@inproceedings{10.1007/978-3-031-22695-3_9,
title = {{EDE-NAS}: An Eclectic Differential Evolution Approach to Single-Path Neural Architecture Search},
author = {Junhao Huang and Bing Xue and Yanan Sun and Mengjie Zhang},
editor = {Haris Aziz and Débora Corrêa and Tim French},
url = {https://link.springer.com/chapter/10.1007/978-3-031-22695-3_9},
isbn = {978-3-031-22695-3},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {{AI} 2022: Advances in Artificial Intelligence},
pages = {116--130},
publisher = {Springer International Publishing},
address = {Cham},
abstract = {Convolutional neural networks (CNNs) are a very prevalent and powerful deep learning paradigm. In recent years, many neural architecture search (NAS) methods have been developed to automate the design process of CNN architectures, significantly reducing human effort. Among various search techniques, differential evolution (DE), as a popular evolutionary computation algorithm, has advantages of fewer control variables, fast convergence and powerful optimization capability. However, existing DE-based NAS methods simply use conventional search operators, and do not consider the global and local information in the search process well, thus failing to achieve satisfactory results. In this paper, we propose an eclectic DE approach for NAS that can make good use of the search capability of DE. The architectural parameters are encoded into two parts according to their ranges. A discrete mutation operator is proposed to evolve the part that has a small search space, while a versatile mutation operator is devised for the other part with a large search space. The proposed DE algorithm can well balance the global and local search, and yields better overall results than most compared methods with a single-path CNN architecture design based on basic operations on four benchmark image classification datasets.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Zhang, Xin; Saniie, Jafar
Reinforcement Learning Based Neural Architecture Search for Flaw Detection in Intelligent Ultrasonic Imaging NDE System Proceedings Article
In: 2022 IEEE International Ultrasonics Symposium (IUS), pp. 1-4, 2022.
@inproceedings{9957772,
title = {Reinforcement Learning Based Neural Architecture Search for Flaw Detection in Intelligent Ultrasonic Imaging {NDE} System},
author = {Xin Zhang and Jafar Saniie},
url = {https://ieeexplore.ieee.org/abstract/document/9957772},
doi = {10.1109/IUS54386.2022.9957772},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {2022 IEEE International Ultrasonics Symposium (IUS)},
pages = {1--4},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Yu, Jialiang; Gao, Song; Tian, Jie; Bian, Hongli; Liu, Hui; Li, Junqing
Evolutionary Neural Architecture Search Based on Variational Inference Bayesian Convolutional Neural Network Proceedings Article
In: 2022 4th International Conference on Data-driven Optimization of Complex Systems (DOCS), pp. 1-6, 2022.
@inproceedings{9967744,
title = {Evolutionary Neural Architecture Search Based on Variational Inference Bayesian Convolutional Neural Network},
author = {Jialiang Yu and Song Gao and Jie Tian and Hongli Bian and Hui Liu and Junqing Li},
url = {https://ieeexplore.ieee.org/abstract/document/9967744},
doi = {10.1109/DOCS55193.2022.9967744},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {2022 4th International Conference on Data-driven Optimization of Complex Systems (DOCS)},
pages = {1--6},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Li, Jiaquan; Hong, Haokai; Shi, Minghui; Lin, Qiuzhen; Zhou, Fenfen; Tan, Kay Chen; Jiang, Min
Knowledge transfer for Object Detection with Evolution architecture search Proceedings Article
In: 2022 4th International Conference on Data-driven Optimization of Complex Systems (DOCS), pp. 1-6, 2022.
@inproceedings{9967711,
title = {Knowledge transfer for Object Detection with Evolution architecture search},
author = {Jiaquan Li and Haokai Hong and Minghui Shi and Qiuzhen Lin and Fenfen Zhou and Kay Chen Tan and Min Jiang},
url = {https://ieeexplore.ieee.org/abstract/document/9967711},
doi = {10.1109/DOCS55193.2022.9967711},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {2022 4th International Conference on Data-driven Optimization of Complex Systems (DOCS)},
pages = {1--6},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Yuan, Jinliang; Xu, Mengwei; Zhao, Yuxin; Bian, Kaigui; Huang, Gang; Liu, Xuanzhe; Wang, Shangguang
Resource-Aware Federated Neural Architecture Search over Heterogeneous Mobile Devices Journal Article
In: IEEE Transactions on Big Data, pp. 1-11, 2022.
@article{9973344,
title = {Resource-Aware Federated Neural Architecture Search over Heterogeneous Mobile Devices},
author = {Jinliang Yuan and Mengwei Xu and Yuxin Zhao and Kaigui Bian and Gang Huang and Xuanzhe Liu and Shangguang Wang},
url = {https://ieeexplore.ieee.org/abstract/document/9973344},
doi = {10.1109/TBDATA.2022.3227403},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {IEEE Transactions on Big Data},
pages = {1--11},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Zhu, Yichen; Fu, Xiaowei
BNNAS++: Towards Unbiased Neural Architecture Search With Batch Normalization Journal Article
In: IEEE Access, vol. 10, pp. 128424-128432, 2022.
@article{9970308,
title = {{BNNAS++}: Towards Unbiased Neural Architecture Search With Batch Normalization},
author = {Yichen Zhu and Xiaowei Fu},
url = {https://ieeexplore.ieee.org/document/9970308},
doi = {10.1109/ACCESS.2022.3226692},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {IEEE Access},
volume = {10},
pages = {128424--128432},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Bouzidi, Halima; Odema, Mohanad; Ouarnoughi, Hamza; Faruque, Mohammad Abdullah Al; Niar, Smail
HADAS: Hardware-Aware Dynamic Neural Architecture Search for Edge Performance Scaling Technical Report
2022.
@techreport{https://doi.org/10.48550/arxiv.2212.03354,
title = {{HADAS}: Hardware-Aware Dynamic Neural Architecture Search for Edge Performance Scaling},
author = {Halima Bouzidi and Mohanad Odema and Hamza Ouarnoughi and Mohammad Abdullah Al Faruque and Smail Niar},
url = {https://arxiv.org/abs/2212.03354},
doi = {10.48550/ARXIV.2212.03354},
institution = {arXiv},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
publisher = {arXiv},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Guan, Ziyi; Zhou, Wenyong; Ren, Yuan; Xie, Rui; Yu, Hao; Wong, Ngai
A Hardware-Aware Neural Architecture Search Pareto Front Exploration for In-Memory Computing Proceedings Article
In: 2022 IEEE 16th International Conference on Solid-State & Integrated Circuit Technology (ICSICT), pp. 1-4, 2022.
@inproceedings{9963263,
title = {A Hardware-Aware Neural Architecture Search Pareto Front Exploration for In-Memory Computing},
author = {Ziyi Guan and Wenyong Zhou and Yuan Ren and Rui Xie and Hao Yu and Ngai Wong},
url = {https://ieeexplore.ieee.org/abstract/document/9963263},
doi = {10.1109/ICSICT55466.2022.9963263},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {2022 IEEE 16th International Conference on Solid-State \& Integrated Circuit Technology (ICSICT)},
pages = {1--4},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Peng, Yameng; Song, Andy; Ciesielski, Vic; Fayek, Haytham M.; Chang, Xiaojun
PRE-NAS: Evolutionary Neural Architecture Search with Predictor Journal Article
In: IEEE Transactions on Evolutionary Computation, pp. 1-1, 2022.
@article{9975797,
title = {{PRE-NAS}: Evolutionary Neural Architecture Search with Predictor},
author = {Yameng Peng and Andy Song and Vic Ciesielski and Haytham M. Fayek and Xiaojun Chang},
url = {https://ieeexplore.ieee.org/abstract/document/9975797},
doi = {10.1109/TEVC.2022.3227562},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {IEEE Transactions on Evolutionary Computation},
pages = {1--1},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Liu, Shiqing; Zhang, Haoyu; Jin, Yaochu
A survey on computationally efficient neural architecture search Journal Article
In: Journal of Automation and Intelligence, vol. 1, no. 1, pp. 100002, 2022, ISSN: 2949-8554.
@article{LIU2022100002,
title = {A survey on computationally efficient neural architecture search},
author = {Shiqing Liu and Haoyu Zhang and Yaochu Jin},
url = {https://www.sciencedirect.com/science/article/pii/S2949855422000028},
doi = {10.1016/j.jai.2022.100002},
issn = {2949-8554},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {Journal of Automation and Intelligence},
volume = {1},
number = {1},
pages = {100002},
abstract = {Neural architecture search (NAS) has become increasingly popular in the deep learning community recently, mainly because it can provide an opportunity to allow interested users without rich expertise to benefit from the success of deep neural networks (DNNs). However, NAS is still laborious and time-consuming because a large number of performance estimations are required during the search process of NAS, and training DNNs is computationally intensive. To solve this major limitation of NAS, improving the computational efficiency is essential in the design of NAS. However, a systematic overview of computationally efficient NAS (CE-NAS) methods still lacks. To fill this gap, we provide a comprehensive survey of the state-of-the-art on CE-NAS by categorizing the existing work into proxy-based and surrogate-assisted NAS methods, together with a thorough discussion of their design principles and a quantitative comparison of their performances and computational complexities. The remaining challenges and open research questions are also discussed, and promising research topics in this emerging field are suggested.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Falanti, Andrea; Lomurno, Eugenio; Ardagna, Danilo; Matteucci, Matteo
POPNASv3: a Pareto-Optimal Neural Architecture Search Solution for Image and Time Series Classification Technical Report
2022.
@techreport{https://doi.org/10.48550/arxiv.2212.06735,
title = {POPNASv3: a Pareto-Optimal Neural Architecture Search Solution for Image and Time Series Classification},
author = {Andrea Falanti and Eugenio Lomurno and Danilo Ardagna and Matteo Matteucci},
url = {https://arxiv.org/abs/2212.06735},
doi = {10.48550/arXiv.2212.06735},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
publisher = {arXiv},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Lee, Bokyeung; Ko, Kyungdeuk; Hong, Jonghwan; Ko, Hanseok
Single Cell Training on Architecture Search for Image Denoising Technical Report
2022.
@techreport{https://doi.org/10.48550/arxiv.2212.06368,
title = {Single Cell Training on Architecture Search for Image Denoising},
author = {Bokyeung Lee and Kyungdeuk Ko and Jonghwan Hong and Hanseok Ko},
url = {https://arxiv.org/abs/2212.06368},
doi = {10.48550/arXiv.2212.06368},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
publisher = {arXiv},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Patel, Ria; Rorabaugh, Ariel Keller; Olaya, Paula; Caino-Lores, Silvina; Channing, Georgia; Schuman, Catherine; Miyashita, Osamu; Tama, Florence; Taufer, Michela
A Methodology to Generate Efficient Neural Networks for Classification of Scientific Datasets Proceedings Article
In: 2022 IEEE 18th International Conference on e-Science (e-Science), pp. 389-390, 2022.
@inproceedings{9973637,
title = {A Methodology to Generate Efficient Neural Networks for Classification of Scientific Datasets},
author = {Ria Patel and Ariel Keller Rorabaugh and Paula Olaya and Silvina Caino-Lores and Georgia Channing and Catherine Schuman and Osamu Miyashita and Florence Tama and Michela Taufer},
url = {https://ieeexplore.ieee.org/abstract/document/9973637},
doi = {10.1109/eScience55777.2022.00052},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {2022 IEEE 18th International Conference on e-Science (e-Science)},
pages = {389--390},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Dai, Haixing; Li, Qing; Zhao, Lin; Pan, Liming; Shi, Cheng; Liu, Zhengliang; Wu, Zihao; Zhang, Lu; Zhao, Shijie; Wu, Xia; Liu, Tianming; Zhu, Dajiang
Graph Representation Neural Architecture Search for Optimal Spatial/Temporal Functional Brain Network Decomposition Proceedings Article
In: Lian, Chunfeng; Cao, Xiaohuan; Rekik, Islem; Xu, Xuanang; Cui, Zhiming (Ed.): Machine Learning in Medical Imaging, pp. 279–287, Springer Nature Switzerland, Cham, 2022, ISBN: 978-3-031-21014-3.
@inproceedings{10.1007/978-3-031-21014-3_29,
title = {Graph Representation Neural Architecture Search for Optimal Spatial/Temporal Functional Brain Network Decomposition},
author = {Haixing Dai and Qing Li and Lin Zhao and Liming Pan and Cheng Shi and Zhengliang Liu and Zihao Wu and Lu Zhang and Shijie Zhao and Xia Wu and Tianming Liu and Dajiang Zhu},
editor = {Chunfeng Lian and Xiaohuan Cao and Islem Rekik and Xuanang Xu and Zhiming Cui},
url = {https://link.springer.com/chapter/10.1007/978-3-031-21014-3_29},
doi = {10.1007/978-3-031-21014-3_29},
isbn = {978-3-031-21014-3},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {Machine Learning in Medical Imaging},
pages = {279--287},
publisher = {Springer Nature Switzerland},
address = {Cham},
abstract = {Decomposing the spatial/temporal functional brain networks from 4D functional magnetic resonance imaging (fMRI) data has attracted extensive attention. Among all these efforts, deep neural network-based methods have shown significant advantages due to their powerful hierarchical representation ability. However, the network architectures of those deep learning models are manually crafted, which is time consuming and non-optimal. This paper presents a novel graph representation neural architecture search (GR-NAS) method based on graph representation to optimize the vanilla RNN cell structure for decomposing spatial/temporal brain networks. The core idea is to embed the discrete search space of the RNN cell into a continuous domain that preserves the topological information. After that, popular search algorithms, e.g., reinforcement learning (RL) and Bayesian optimization (BO), can be employed to find the optimal architecture in this continuous space. The proposed method was evaluated on the Human Connectome Project (HCP) task fMRI datasets. Extensive experiments demonstrated the superiority of the proposed model in brain network decomposition both spatially and temporally. To our best knowledge, the proposed model is among the early efforts using NAS strategy to optimally decompose spatial/temporal functional brain networks from fMRI data.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Garavagno, Andrea Mattia; Leonardis, Daniele; Frisoli, Antonio
Colab NAS: Obtaining lightweight task-specific convolutional neural networks following Occam's razor Technical Report
2022.
@techreport{https://doi.org/10.48550/arxiv.2212.07700,
title = {Colab NAS: Obtaining lightweight task-specific convolutional neural networks following Occam's razor},
author = {Andrea Mattia Garavagno and Daniele Leonardis and Antonio Frisoli},
url = {https://arxiv.org/abs/2212.07700},
doi = {10.48550/arXiv.2212.07700},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
publisher = {arXiv},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Shavit, Hadar; Jatelnicki, Filip; Mor-Puigventós, Pol; Kowalczyk, Wojtek
From Xception to NEXcepTion: New Design Decisions and Neural Architecture Search Technical Report
2022.
@techreport{https://doi.org/10.48550/arxiv.2212.08448,
title = {From Xception to NEXcepTion: New Design Decisions and Neural Architecture Search},
author = {Hadar Shavit and Filip Jatelnicki and Pol Mor-Puigventós and Wojtek Kowalczyk},
url = {https://arxiv.org/abs/2212.08448},
doi = {10.48550/arXiv.2212.08448},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
publisher = {arXiv},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}
Huang, Han; Shen, Li; He, Chaoyang; Dong, Weisheng; Liu, Wei
Differentiable Neural Architecture Search for Extremely Lightweight Image Super-Resolution Journal Article
In: IEEE Transactions on Circuits and Systems for Video Technology, pp. 1-1, 2022.
@article{9992250,
title = {Differentiable Neural Architecture Search for Extremely Lightweight Image Super-Resolution},
author = {Han Huang and Li Shen and Chaoyang He and Weisheng Dong and Wei Liu},
url = {https://ieeexplore.ieee.org/abstract/document/9992250},
doi = {10.1109/TCSVT.2022.3230824},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {IEEE Transactions on Circuits and Systems for Video Technology},
pages = {1--1},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Park, Seongmin; Kwon, Beomseok; Lim, Jieun; Sim, Kyuyoung; Kim, Taeho; Choi, Jungwook
Automatic Network Adaptation for Ultra-Low Uniform-Precision Quantization Miscellaneous
2022.
@misc{https://doi.org/10.48550/arxiv.2212.10878,
title = {Automatic Network Adaptation for Ultra-Low Uniform-Precision Quantization},
author = {Seongmin Park and Beomseok Kwon and Jieun Lim and Kyuyoung Sim and Taeho Kim and Jungwook Choi},
url = {https://arxiv.org/abs/2212.10878},
doi = {10.48550/arXiv.2212.10878},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
publisher = {arXiv},
keywords = {},
pubstate = {published},
tppubtype = {misc}
}
Nikulenkov, Mikhail; Khamitov, Kamil; Popova, Nina
Regularization Approach for Accelerating Neural Architecture Search Proceedings Article
In: Voevodin, Vladimir; Sobolev, Sergey; Yakobovskiy, Mikhail; Shagaliev, Rashit (Ed.): Supercomputing, pp. 475–485, Springer International Publishing, Cham, 2022, ISBN: 978-3-031-22941-1.
@inproceedings{10.1007/978-3-031-22941-1_35,
title = {Regularization Approach for Accelerating Neural Architecture Search},
author = {Mikhail Nikulenkov and Kamil Khamitov and Nina Popova},
editor = {Vladimir Voevodin and Sergey Sobolev and Mikhail Yakobovskiy and Rashit Shagaliev},
url = {https://link.springer.com/chapter/10.1007/978-3-031-22941-1_35},
doi = {10.1007/978-3-031-22941-1_35},
isbn = {978-3-031-22941-1},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {Supercomputing},
pages = {475--485},
publisher = {Springer International Publishing},
address = {Cham},
abstract = {Modern Artificial Neural networks utilizes vast topologies to become applicable to the vast majority of the ML problems. Such topologies have an enormous number of parameters nowadays, which make it easier to use, but harder to train and modify. With such a number of parameters, the usage of semi-automatic techniques for constructing or adjusting new models for topical problems is vital for the whole industry. It makes it essential to optimize such methods like Neural Architecture Search (NAS) to efficiently utilize computational resources of the GPU and whole cluster. Since modern NAS tools are commonly used to optimize models in different areas or make a combination of models from the past, they have to use a plenty of computational power to perform certain hyperparameters optimization routines. Many NAS methods are of a highly parallel nature, but still they need a lot of computational power and have an enormous convergence time. The key to increasing performance of many NAS methods is boosting performance of the training of the highly-depth and synthesized model, to evaluate the probe of the model and obtain the score for this epoch. It means that demands for parallel implementations need to be available in different cluster configurations and should utilize as many nodes as possible, showing high scalability. However, straightforward approaches where NAS solving does not consider previous results lead to wasteful utilization of computation power. In this article, we introduce a new method that can improve convergence in NEAT-based NAS method using L1/L2 regularization during the evolution step.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Liu, Yuqiao; Li, Haipeng; Sun, Yanan; Liu, Shuaicheng
DAS: Neural Architecture Search via Distinguishing Activation Score Miscellaneous
2022.
@misc{https://doi.org/10.48550/arxiv.2212.12132,
title = {DAS: Neural Architecture Search via Distinguishing Activation Score},
author = {Yuqiao Liu and Haipeng Li and Yanan Sun and Shuaicheng Liu},
url = {https://arxiv.org/abs/2212.12132},
doi = {10.48550/arXiv.2212.12132},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
publisher = {arXiv},
keywords = {},
pubstate = {published},
tppubtype = {misc}
}
Damarla, Anupama; Doraikannan, Sumathi
Optimized one-shot neural architecture search for skin cancer classification Journal Article
In: Journal of Electronic Imaging, vol. 31, no. 6, pp. 063053, 2022.
% NOTE(review): entry looks clean -- bare DOI in the doi field, url is the matching
% resolver link, and pages = {063053} appears to be the journal's article number
% (eLocator) rather than a page range, so no double-hyphen fix applies here.
@article{10.1117/1.JEI.31.6.063053,
title = {Optimized one-shot neural architecture search for skin cancer classification},
author = {Anupama Damarla and Sumathi Doraikannan},
url = {https://doi.org/10.1117/1.JEI.31.6.063053},
doi = {10.1117/1.JEI.31.6.063053},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
journal = {Journal of Electronic Imaging},
volume = {31},
number = {6},
pages = {063053},
publisher = {SPIE},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Wang, Zhaolei; Lu, Kunfeng; Yu, Chunmei; Yao, Na; Wang, Ludi; Zhao, Jikang
Rocket Self-learning Control based on Lightweight Neural Network Architecture Search Proceedings Article
In: 2022 IEEE International Conference on Unmanned Systems (ICUS), pp. 1621-1626, 2022.
@inproceedings{9986957,
title = {Rocket Self-learning Control based on Lightweight Neural Network Architecture Search},
author = {Zhaolei Wang and Kunfeng Lu and Chunmei Yu and Na Yao and Ludi Wang and Jikang Zhao},
url = {https://ieeexplore.ieee.org/abstract/document/9986957},
doi = {10.1109/ICUS55513.2022.9986957},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {2022 IEEE International Conference on Unmanned Systems (ICUS)},
pages = {1621--1626},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Ou, Yuwei; Xie, Xiangning; Gao, Shangce; Sun, Yanan; Tan, Kay Chen; Lv, Jiancheng
Differentiable Search of Accurate and Robust Architectures Miscellaneous
2022.
@misc{https://doi.org/10.48550/arxiv.2212.14049,
title = {Differentiable Search of Accurate and Robust Architectures},
author = {Yuwei Ou and Xiangning Xie and Shangce Gao and Yanan Sun and Kay Chen Tan and Jiancheng Lv},
url = {https://arxiv.org/abs/2212.14049},
doi = {10.48550/ARXIV.2212.14049},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
publisher = {arXiv},
keywords = {},
pubstate = {published},
tppubtype = {misc}
}
Yao, Siya; Zhao, Yu; Deng, Qi; Ma, Jun; Kang, Qi
Multi-objective Neural Architecture Adaptation in Transfer Learning Proceedings Article
In: 2022 IEEE International Conference on Networking, Sensing and Control (ICNSC), pp. 1-5, 2022.
@inproceedings{10004146,
title = {Multi-objective Neural Architecture Adaptation in Transfer Learning},
author = {Siya Yao and Yu Zhao and Qi Deng and Jun Ma and Qi Kang},
doi = {10.1109/ICNSC55942.2022.10004146},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {2022 IEEE International Conference on Networking, Sensing and Control (ICNSC)},
pages = {1--5},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Yao, Siya; Zhao, Yu; Deng, Qi; Ma, Jun; Kang, Qi
Multi-objective Neural Architecture Adaptation in Transfer Learning Proceedings Article
In: 2022 IEEE International Conference on Networking, Sensing and Control (ICNSC), pp. 1-5, 2022.
@inproceedings{10004146b,
title = {Multi-objective Neural Architecture Adaptation in Transfer Learning},
author = {Siya Yao and Yu Zhao and Qi Deng and Jun Ma and Qi Kang},
doi = {10.1109/ICNSC55942.2022.10004146},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {2022 IEEE International Conference on Networking, Sensing and Control (ICNSC)},
pages = {1--5},
internal-note = {Field-for-field duplicate of entry 10004146 (same DOI); kept only in case this key is cited elsewhere -- consider deduplicating},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Yao, Siya; Zhao, Yu; Deng, Qi; Ma, Jun; Kang, Qi
Multi-objective Neural Architecture Adaptation in Transfer Learning Proceedings Article
In: 2022 IEEE International Conference on Networking, Sensing and Control (ICNSC), pp. 1-5, 2022.
@inproceedings{10004146c,
title = {Multi-objective Neural Architecture Adaptation in Transfer Learning},
author = {Siya Yao and Yu Zhao and Qi Deng and Jun Ma and Qi Kang},
doi = {10.1109/ICNSC55942.2022.10004146},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {2022 IEEE International Conference on Networking, Sensing and Control (ICNSC)},
pages = {1--5},
internal-note = {Field-for-field duplicate of entry 10004146 (same DOI); kept only in case this key is cited elsewhere -- consider deduplicating},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Li, Yutong
Forest fire smoke recognition and detection based on EfficientNet Proceedings Article
In: 2022 IEEE Conference on Telecommunications, Optics and Computer Science (TOCS), pp. 712-719, 2022.
@inproceedings{10016028,
title = {Forest fire smoke recognition and detection based on EfficientNet},
author = {Yutong Li},
url = {https://ieeexplore.ieee.org/abstract/document/10016028},
doi = {10.1109/TOCS56154.2022.10016028},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {2022 IEEE Conference on Telecommunications, Optics and Computer Science (TOCS)},
pages = {712--719},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Lyu, Xinqi; Chen, Yibo; Chen, Jiamin; Liu, Xiangyue; Gao, Jianliang
Spatio-Temporal Based Architecture Topology Search for Multivariate Time Series Prediction Proceedings Article
In: 2022 IEEE International Conference on Big Data (Big Data), pp. 1304-1309, 2022.
@inproceedings{10020729,
title = {Spatio-Temporal Based Architecture Topology Search for Multivariate Time Series Prediction},
author = {Xinqi Lyu and Yibo Chen and Jiamin Chen and Xiangyue Liu and Jianliang Gao},
url = {https://ieeexplore.ieee.org/abstract/document/10020729},
doi = {10.1109/BigData55660.2022.10020729},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {2022 IEEE International Conference on Big Data (Big Data)},
pages = {1304--1309},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Wei, Zimian; Pan, Hengyue; Li, Lujun; Lu, Menglong; Niu, Xin; Dong, Peijie; Li, Dongsheng
Multi-trial Neural Architecture Search with Lottery Tickets Technical Report
2022.
@techreport{https://doi.org/10.48550/arxiv.2203.04300,
title = {Multi-trial Neural Architecture Search with Lottery Tickets},
author = {Zimian Wei and Hengyue Pan and Lujun Li and Menglong Lu and Xin Niu and Peijie Dong and Dongsheng Li},
url = {https://arxiv.org/abs/2203.04300},
doi = {10.48550/arXiv.2203.04300},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
publisher = {arXiv},
keywords = {},
pubstate = {published},
tppubtype = {techreport}
}