{"payload":{"header_redesign_enabled":false,"results":[{"id":"311540638","archived":false,"color":"#3572A5","followers":2,"has_funding_file":false,"hl_name":"gdao-research/attention_mechanism","hl_trunc_description":"An implementation of multiple notable attention mechanisms using TensorFlow 2","language":"Python","mirror":false,"owned_by_organization":false,"public":true,"repo":{"repository":{"id":311540638,"name":"attention_mechanism","owner_id":41265560,"owner_login":"gdao-research","updated_at":"2020-12-29T04:41:42.462Z","has_issues":true}},"sponsorable":false,"topics":["attention-mechanism","performer","reformer","self-attention","tensorflow2","vision-transformer","lambda-networks"],"type":"Public","help_wanted_issues_count":0,"good_first_issue_issues_count":0,"starred_by_current_user":false}],"type":"repositories","page":1,"page_count":1,"elapsed_millis":79,"errors":[],"result_count":1,"facets":[],"protected_org_logins":[],"topics":null,"query_id":"","logged_in":false,"sign_up_path":"/signup?source=code_search_results","sign_in_path":"/login?return_to=https%3A%2F%2Fgithub.com%2Fsearch%3Fq%3Drepo%253Agdao-research%252Fattention_mechanism%2B%2Blanguage%253APython","metadata":null,"warn_limited_results":false,"csrf_tokens":{"/gdao-research/attention_mechanism/star":{"post":"i7dus9OG7dE9W25GkVKHFRI__1to21l7tGpumNOlwk5qdwqXwvoWknbf5x8B-RgiTeAQ-dOWxy_SDGXvNC-K3Q"},"/gdao-research/attention_mechanism/unstar":{"post":"WC7XFs5pOUUOKOzsjICCCAtD_DI6qo6MobLLKO7AXr1Fe4sy2EGoJe_JLI4uutjC61YuDytHDuWyydvlbut5qQ"},"/sponsors/batch_deferred_sponsor_buttons":{"post":"pHn1fV8ksP8FGdMlrOoLkO2g3lMEjy0aetObM3qihT9HMTEHB3HSsJSCbLpUya0JZV2x8NLXIxh2Dv8i0821HQ"}}},"title":"Repository search results"}