{"payload":{"header_redesign_enabled":false,"results":[{"id":"197785701","archived":false,"color":"#3572A5","followers":869,"has_funding_file":false,"hl_name":"davidmrau/mixture-of-experts","hl_trunc_description":"PyTorch Re-Implementation of \"The Sparsely-Gated Mixture-of-Experts Layer\" by Noam Shazeer et al. https://arxiv.org/abs/1701.06538","language":"Python","mirror":false,"owned_by_organization":false,"public":true,"repo":{"repository":{"id":197785701,"name":"mixture-of-experts","owner_id":20661461,"owner_login":"davidmrau","updated_at":"2024-04-19T08:22:39.825Z","has_issues":true}},"sponsorable":false,"topics":["pytorch","moe","re-implementation","mixture-of-experts","sparsely-gated-mixture-of-experts"],"type":"Public","help_wanted_issues_count":0,"good_first_issue_issues_count":0,"starred_by_current_user":false}],"type":"repositories","page":1,"page_count":1,"elapsed_millis":77,"errors":[],"result_count":1,"facets":[],"protected_org_logins":[],"topics":null,"query_id":"","logged_in":false,"sign_up_path":"/signup?source=code_search_results","sign_in_path":"/login?return_to=https%3A%2F%2Fgithub.com%2Fsearch%3Fq%3Drepo%253Adavidmrau%252Fmixture-of-experts%2B%2Blanguage%253APython","metadata":null,"csrf_tokens":{"/davidmrau/mixture-of-experts/star":{"post":"AIdJjD863Ju-2UDd_Fo_XlXFVgoeqLHTQphUnoAilt9kZP04GWAurCbS0Yv5LaDLNPWbS4dNbGNlPmYVG7ZX1g"},"/davidmrau/mixture-of-experts/unstar":{"post":"2K2htDL89fZqjWyduP9qP9vCArr9Re5oJkMgKmyh-uoZSc7qCXGr1j7JM4o9mlS08NzCmECt-w0XF1RT_GIiHg"},"/sponsors/batch_deferred_sponsor_buttons":{"post":"JltDbR7B58vSdQOiW6RXHr4BPi2DZzvo4IRAJT7HPtvos5kQvuDae8pZRdjDWfKjhUmj6ji9ktM7YlBV87Rrig"}}},"title":"Repository search results"}