{"payload":{"header_redesign_enabled":false,"results":[{"id":"816136553","archived":false,"color":"#3572A5","followers":33,"has_funding_file":false,"hl_name":"Spico197/MoE-SFT","hl_trunc_description":"🍼 Official implementation of Dynamic Data Mixing Maximizes Instruction Tuning for Mixture-of-Experts","language":"Python","mirror":false,"owned_by_organization":false,"public":true,"repo":{"repository":{"id":816136553,"name":"MoE-SFT","owner_id":22840952,"owner_login":"Spico197","updated_at":"2024-06-25T02:34:49.043Z","has_issues":true}},"sponsorable":false,"topics":["mixture-of-experts","large-language-models","instruction-tuning","dynamic-sampling"],"type":"Public","help_wanted_issues_count":0,"good_first_issue_issues_count":0,"starred_by_current_user":false}],"type":"repositories","page":1,"page_count":1,"elapsed_millis":66,"errors":[],"result_count":1,"facets":[],"protected_org_logins":[],"topics":null,"query_id":"","logged_in":false,"sign_up_path":"/signup?source=code_search_results","sign_in_path":"/login?return_to=https%3A%2F%2Fgithub.com%2Fsearch%3Fq%3Drepo%253ASpico197%252FMoE-SFT%2B%2Blanguage%253APython","metadata":null,"warn_limited_results":false,"csrf_tokens":{"/Spico197/MoE-SFT/star":{"post":"jweLh0HQ09waCaPzc_GWh0Hg9Q_LYTJkwskuNJuhwCNmxudVsiroWrHQWF3NTdK_ywMD9nOoTGGUt6RDiMiHcw"},"/Spico197/MoE-SFT/unstar":{"post":"ILGJMk9A6-5ILxK8iqv8TFks0NEiOUYHM3Vzg1duUOC_MyDxVYb1EB3dhD-juLIroS5Ar-NapbDokae2K8fqTQ"},"/sponsors/batch_deferred_sponsor_buttons":{"post":"_z-O7vl9KUJxgR3lqMGA7kO4tSxhatvmA9lDlYRV3qsyz7HGdk6u9BYbLa1jxA6FcWD3SVerAgtLISDzAO688Q"}}},"title":"Repository search results"}