{"payload":{"pageCount":1,"repositories":[{"type":"Public","name":"SMARTS","owner":"huawei-noah","isFork":false,"description":"Scalable Multi-Agent RL Training School for Autonomous Driving","allTopics":["python","simulator","autonomous-driving","reinforcement-learning"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":13,"issueCount":200,"starsCount":899,"forksCount":184,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-06-10T23:08:05.169Z"}},{"type":"Public","name":"Federated-Learning","owner":"huawei-noah","isFork":false,"description":"","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":17,"forksCount":4,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-20T03:43:16.469Z"}},{"type":"Public","name":"Efficient-AI-Backbones","owner":"huawei-noah","isFork":false,"description":"Efficient AI Backbones including GhostNet, TNT and MLP, developed by Huawei Noah's Ark Lab.","allTopics":["tensorflow","transformer","imagenet","convolutional-neural-networks","pretrained-models","model-compression","efficient-inference","ghostnet","vision-transformer","pytorch"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":80,"starsCount":3874,"forksCount":690,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-08T09:07:42.433Z"}},{"type":"Public","name":"trustworthyAI","owner":"huawei-noah","isFork":false,"description":"Trustworthy AI related projects","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":28,"starsCount":913,"forksCount":209,"license":"Apache License 2.0","participation":[3,0,2,2,0,7,0,2,8,0,0,0,0,0,0,0,3,2,0,0,5,0,0,1,4,0,0,0,30,3,0,0,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-02-20T06:13:16.287Z"}},{"type":"Public","name":"Pretrained-Language-Model","owner":"huawei-noah","isFork":false,"description":"Pretrained language model and its related optimization techniques developed by Huawei Noah's Ark Lab.","allTopics":["quantization","model-compression","large-scale-distributed","pretrained-models","knowledge-distillation"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":10,"issueCount":98,"starsCount":2977,"forksCount":626,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-01-22T01:11:22.476Z"}},{"type":"Public","name":"noah-research","owner":"huawei-noah","isFork":false,"description":"Noah Research","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":23,"issueCount":62,"starsCount":839,"forksCount":159,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-12-27T23:17:29.231Z"}},{"type":"Public","name":"VanillaNet","owner":"huawei-noah","isFork":false,"description":"","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":22,"starsCount":797,"forksCount":57,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-10-19T01:37:50.683Z"}},{"type":"Public","name":"xingtian","owner":"huawei-noah","isFork":false,"description":"xingtian is a componentized library for the development and verification of reinforcement learning algorithms","allTopics":["impala","dqn","ppo","muzero","qmix","reinforcement-learning-algorithms"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":5,"issueCount":6,"starsCount":303,"forksCount":89,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-09-12T12:56:36.507Z"}},{"type":"Public","name":"vega","owner":"huawei-noah","isFork":false,"description":"AutoML tools chain","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":1,"issueCount":51,"starsCount":840,"forksCount":176,"license":"Other","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-02-15T09:36:59.979Z"}},{"type":"Public","name":"Pretrained-IPT","owner":"huawei-noah","isFork":false,"description":"","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":21,"starsCount":413,"forksCount":62,"license":"Apache License 2.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2022-11-14T06:53:37.040Z"}},{"type":"Public","name":"Efficient-NLP","owner":"huawei-noah","isFork":false,"description":"","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":1,"issueCount":4,"starsCount":78,"forksCount":11,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-06-04T18:53:28.475Z"}},{"type":"Public","name":"AdderNet","owner":"huawei-noah","isFork":false,"description":"Code for paper \" AdderNet: Do We Really Need Multiplications in Deep Learning?\"","allTopics":["imagenet","convolutional-neural-networks","efficient-inference","cvpr2020","pytorch"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":11,"starsCount":948,"forksCount":186,"license":"BSD 3-Clause \"New\" or \"Revised\" License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2022-03-19T02:54:28.110Z"}},{"type":"Public","name":"multi_hyp_cc","owner":"huawei-noah","isFork":false,"description":"[CVPR2020] A Multi-Hypothesis Approach to Color Constancy","allTopics":["pytorch","convolutional-neural-networks","cvpr","whitebalance","cvpr2020","color-constancy"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":81,"forksCount":14,"license":"Other","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2021-07-11T14:06:19.885Z"}},{"type":"Public","name":"BHT-ARIMA","owner":"huawei-noah","isFork":false,"description":"Code for paper: Block Hankel Tensor ARIMA for Multiple Short Time Series Forecasting (AAAI-20)","allTopics":["time-series","tensor-factorization","tensor-decomposition","arima-forecasting"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":1,"issueCount":3,"starsCount":98,"forksCount":37,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2021-06-08T01:29:04.738Z"}},{"type":"Public","name":"Disout","owner":"huawei-noah","isFork":false,"description":"Code for AAAI 2020 paper, Beyond Dropout: Feature Map Distortion to Regularize Deep Neural Networks (Disout).","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":220,"forksCount":39,"license":"BSD 3-Clause \"New\" or \"Revised\" License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2020-10-30T06:32:37.264Z"}},{"type":"Public","name":"BGCN","owner":"huawei-noah","isFork":false,"description":"A Tensorflow implementation of \"Bayesian Graph Convolutional Neural Networks\" (AAAI 2019).","allTopics":["gcn","bayesian-neural-networks","graph-convolutional-network","graph-learning","graph-neural-networks","graph-convolutional-neural-networks","gnn","bayesian-graph-neural-networks"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":1,"starsCount":151,"forksCount":46,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2019-09-08T17:26:43.139Z"}}],"repositoryCount":16,"userInfo":null,"searchable":true,"definitions":[],"typeFilters":[{"id":"all","text":"All"},{"id":"public","text":"Public"},{"id":"source","text":"Sources"},{"id":"fork","text":"Forks"},{"id":"archived","text":"Archived"},{"id":"template","text":"Templates"}],"compactMode":false},"title":"Repositories"}