{"payload":{"header_redesign_enabled":false,"results":[{"id":"465794584","archived":false,"color":"#DA5B0B","followers":716,"has_funding_file":false,"hl_name":"Denis2054/Transformers-for-NLP-2nd-Edition","hl_trunc_description":"Transformer models from BERT to GPT-4, environments from Hugging Face to OpenAI. Fine-tuning, training, and prompt engineering examples. …","language":"Jupyter Notebook","mirror":false,"owned_by_organization":false,"public":true,"repo":{"repository":{"id":465794584,"name":"Transformers-for-NLP-2nd-Edition","owner_id":30811222,"owner_login":"Denis2054","updated_at":"2024-01-04T09:54:36.111Z","has_issues":true}},"sponsorable":false,"topics":["python","nlp","machine-learning","natural-language-processing","deep-learning","transformers","pytorch","openai","bert","trax","huggingface-transformers","roberta-model","dall-e","gpt-4","chatgpt","dall-e-api","chatgpt-api","gpt-3-5-turbo","gpt-4-api"],"type":"Public","help_wanted_issues_count":0,"good_first_issue_issues_count":0,"starred_by_current_user":false}],"type":"repositories","page":1,"page_count":1,"elapsed_millis":59,"errors":[],"result_count":1,"facets":[],"protected_org_logins":[],"topics":null,"query_id":"","logged_in":false,"sign_up_path":"/signup?source=code_search_results","sign_in_path":"/login?return_to=https%3A%2F%2Fgithub.com%2Fsearch%3Fq%3Drepo%253ADenis2054%252FTransformers-for-NLP-2nd-Edition%2B%2Blanguage%253A%2522Jupyter%2BNotebook%2522","metadata":null,"csrf_tokens":{"/Denis2054/Transformers-for-NLP-2nd-Edition/star":{"post":"6vdZ-980DmrQP8chcHM7hNa42gS4pGLE833pfG8PiBcbRrYDI5SG1bkIngNtgf5Nu5d6z1xuEsl3DLaSxtgU6w"},"/Denis2054/Transformers-for-NLP-2nd-Edition/unstar":{"post":"h3FS0mFl1tZIqT_mZElHfS6_9jhbsQGZwWLqkUpKWts5pPeKsDJCRBZdbb8o4gfOhyzRJIWTUBOoVrV2FvvziA"},"/sponsors/batch_deferred_sponsor_buttons":{"post":"ob-FOW6-RxzaB6uuOq-TQk48TgxRs5pNFzJDIS7M2k197cnCTA63-cGfHNJ_wt2MnmRXVpIWbU8C0UsQ1ESJvA"}}},"title":"Repository search results"}