{"payload":{"header_redesign_enabled":false,"results":[{"id":"614325365","archived":false,"color":"#3572A5","followers":17606,"has_funding_file":false,"hl_name":"ymcui/Chinese-LLaMA-Alpaca","hl_trunc_description":"中文LLaMA&Alpaca大语言模型+本地CPU/GPU训练部署 (Chinese LLaMA & Alpaca LLMs)","language":"Python","mirror":false,"owned_by_organization":false,"public":true,"repo":{"repository":{"id":614325365,"name":"Chinese-LLaMA-Alpaca","owner_id":16095339,"owner_login":"ymcui","updated_at":"2024-04-30T04:28:38.928Z","has_issues":true}},"sponsorable":false,"topics":["nlp","llama","lora","quantization","alpaca","plm","pre-trained-language-models","large-language-models","llm","llama-2","alpaca-2"],"type":"Public","help_wanted_issues_count":0,"good_first_issue_issues_count":0,"starred_by_current_user":false}],"type":"repositories","page":1,"page_count":1,"elapsed_millis":88,"errors":[],"result_count":1,"facets":[],"protected_org_logins":[],"topics":null,"query_id":"","logged_in":false,"sign_up_path":"/signup?source=code_search_results","sign_in_path":"/login?return_to=https%3A%2F%2Fgithub.com%2Fsearch%3Fq%3Drepo%253Aymcui%252FChinese-LLaMA-Alpaca%2B%2Blanguage%253APython","metadata":null,"csrf_tokens":{"/ymcui/Chinese-LLaMA-Alpaca/star":{"post":"1Ct3RTAZdcEPou922GUkhrdo88qeML-EMzYJfa1vJvICjcY3uu02w0gwKcYfRqWO61MFj8WJe8KQYht6Xisswg"},"/ymcui/Chinese-LLaMA-Alpaca/unstar":{"post":"ZV8DBcsLbAPF2JbBsJN1Kv6RstQffBr1G2jPEkdOTg4AJUGJ2tlGMF7yCIviujgbjoLubUl-s0uq4I-EoW-Bxw"},"/sponsors/batch_deferred_sponsor_buttons":{"post":"MxgKxrH86CEaNclRZJQfygXwVpSOLnCccNGx213qwiFH6_4-Gt4z4zIzFE9gfhmVokB119MP8jN3AsbPWZQONQ"}}},"title":"Repository search results"}