{"payload":{"header_redesign_enabled":false,"results":[{"id":"670026629","archived":false,"color":"#3572A5","followers":759,"has_funding_file":false,"hl_name":"pjlab-sys4nlp/llama-moe","hl_trunc_description":"⛷️ LLaMA-MoE: Building Mixture-of-Experts from LLaMA with Continual Pre-training","language":"Python","mirror":false,"owned_by_organization":true,"public":true,"repo":{"repository":{"id":670026629,"name":"llama-moe","owner_id":124339599,"owner_login":"pjlab-sys4nlp","updated_at":"2024-02-26T04:58:35.308Z","has_issues":true}},"sponsorable":false,"topics":["moe","llama","mixture-of-experts","llm","continual-pre-training","expert-partition"],"type":"Public","help_wanted_issues_count":0,"good_first_issue_issues_count":0,"starred_by_current_user":false}],"type":"repositories","page":1,"page_count":1,"elapsed_millis":71,"errors":[],"result_count":1,"facets":[],"protected_org_logins":[],"topics":null,"query_id":"","logged_in":false,"sign_up_path":"/signup?source=code_search_results","sign_in_path":"/login?return_to=https%3A%2F%2Fgithub.com%2Fsearch%3Fq%3Drepo%253Apjlab-sys4nlp%252Fllama-moe%2B%2Blanguage%253APython","metadata":null,"csrf_tokens":{"/pjlab-sys4nlp/llama-moe/star":{"post":"Dj2RCiRSmTv5uPgoZ6L1E-v4RI22LVzdH5mesgBy9H4Zin_FON8Yp2_qhe8MyxU6dR9EvDj_cUt6XRo8_09amw"},"/pjlab-sys4nlp/llama-moe/unstar":{"post":"V7ogKRp9ruQfmJcPT-RlUu8uxIoS_4LliRIyRTsdxPWQd_Eh2L0fr9p58JVipfBV8-ATnGBt7Ti846dsYSuAmw"},"/sponsors/batch_deferred_sponsor_buttons":{"post":"C884u79pc6ofO7DuaE3NDtxyptmYDdzXJ04Pm_NbLd3MpmBZcw7VzMvWz2QIn7aZONNSqFhWOKVxgiM6JeuK9A"}}},"title":"Repository search results"}