{"payload":{"header_redesign_enabled":false,"results":[{"id":"816550325","archived":false,"color":"#DA5B0B","followers":4,"has_funding_file":false,"hl_name":"AIDajiangtang/LLM-from-scratch","hl_trunc_description":"Transformer、GPT2、BERT pre-training and fine-tuning from scratch","language":"Jupyter Notebook","mirror":false,"owned_by_organization":false,"public":true,"repo":{"repository":{"id":816550325,"name":"LLM-from-scratch","owner_id":18625471,"owner_login":"AIDajiangtang","updated_at":"2024-07-01T04:46:40.836Z","has_issues":true}},"sponsorable":false,"topics":["fine-tuning","pre-training","llm"],"type":"Public","help_wanted_issues_count":0,"good_first_issue_issues_count":0,"starred_by_current_user":false}],"type":"repositories","page":1,"page_count":1,"elapsed_millis":77,"errors":[],"result_count":1,"facets":[],"protected_org_logins":[],"topics":null,"query_id":"","logged_in":false,"sign_up_path":"/signup?source=code_search_results","sign_in_path":"/login?return_to=https%3A%2F%2Fgithub.com%2Fsearch%3Fq%3Drepo%253AAIDajiangtang%252FLLM-from-scratch%2B%2Blanguage%253A%2522Jupyter%2BNotebook%2522","metadata":null,"csrf_tokens":{"/AIDajiangtang/LLM-from-scratch/star":{"post":"Ehtiqw71aki_HZpf-RWZrn5Ow_EqazVcSH-eutkzFR478qbY0W5dFFnm43cZ5Gb1Qx8L2NLKoBgkoD8cQUB2hA"},"/AIDajiangtang/LLM-from-scratch/unstar":{"post":"RicMNl0E6s6n2nEk8W1ERiWFrjHbgi7fpF3KOZUGf3mJOWI4lbgLIa3In3p4XkP2kSyrHKxti8GQ9o5CsrAQTQ"},"/sponsors/batch_deferred_sponsor_buttons":{"post":"Z2HefvOB5dbO-U3yaXu1DZEpcqK9o1uEA9nfDJssPZeqD1CtYifdGWTOhGlEuPLDkQniNIpSraqRuFZskicWtg"}}},"title":"Repository search results"}