{"payload":{"header_redesign_enabled":false,"results":[{"id":"323651234","archived":false,"color":"#3572A5","followers":6627,"has_funding_file":false,"hl_name":"EleutherAI/gpt-neox","hl_trunc_description":"An implementation of model parallel autoregressive transformers on GPUs, based on the Megatron and DeepSpeed libraries","language":"Python","mirror":false,"owned_by_organization":true,"public":true,"repo":{"repository":{"id":323651234,"name":"gpt-neox","owner_id":68924597,"owner_login":"EleutherAI","updated_at":"2024-05-22T12:34:59.137Z","has_issues":true}},"sponsorable":false,"topics":["transformers","language-model","gpt-3","deepspeed-library"],"type":"Public","help_wanted_issues_count":4,"good_first_issue_issues_count":8,"starred_by_current_user":false}],"type":"repositories","page":1,"page_count":1,"elapsed_millis":91,"errors":[],"result_count":1,"facets":[],"protected_org_logins":[],"topics":null,"query_id":"","logged_in":false,"sign_up_path":"/signup?source=code_search_results","sign_in_path":"/login?return_to=https%3A%2F%2Fgithub.com%2Fsearch%3Fq%3Drepo%253AEleutherAI%252Fgpt-neox%2B%2Blanguage%253APython","metadata":null,"csrf_tokens":{"/EleutherAI/gpt-neox/star":{"post":"jtKoIGSmPKpx4AlAyKsS709hemVaywlcdoENpVKLmsrTp_AakGOKbTuf15VS0LoNERjvn8Gjznw6JQBdzu83kA"},"/EleutherAI/gpt-neox/unstar":{"post":"4B1ScLCXQm-9HTFPMxteS1I9kPWDI5YoX1V7lviPwpOygUK8IEHvfChDSP178nvOEdN3f0f97wCDAaHWC1-_OA"},"/sponsors/batch_deferred_sponsor_buttons":{"post":"6b2TtHHm_EF-RSbQ-p1OkCT9rUSD0QtR_Aprq_kEyKjnE2n63uj1l4mhDkdBTKTm45Ke5H5zF1eUPsUFqpN-Zw"}}},"title":"Repository search results"}