{"id":4245,"date":"2022-05-09T10:06:49","date_gmt":"2022-05-09T02:06:49","guid":{"rendered":"http:\/\/139.9.1.231\/?p=4245"},"modified":"2022-05-09T10:06:51","modified_gmt":"2022-05-09T02:06:51","slug":"attention","status":"publish","type":"post","link":"http:\/\/139.9.1.231\/index.php\/2022\/05\/09\/attention\/","title":{"rendered":"\u6df1\u5ea6\u5b66\u4e60\u4e2d\u7684 Attention \u673a\u5236\u603b\u7ed3\u4e0e\u4ee3\u7801\u5b9e\u73b0\uff082017-2021\u5e74\uff09"},"content":{"rendered":"\n<p>\u8f6c\u8f7d\u81ea<a rel=\"noreferrer noopener\" href=\"https:\/\/www.zhihu.com\/people\/cver-38\" target=\"_blank\">\u4f5c\u8005\uff1amayiwei1998<br>\u672c\u6587\u8f6c\u8f7d\u81ea\uff1aGiantPandaCV<br>\u539f\u6587\u94fe\u63a5\uff1a<a rel=\"noreferrer noopener\" href=\"https:\/\/mp.weixin.qq.com\/s\/QapP8FtYhvCrCFrRdW4zVg\" target=\"_blank\">\u6df1\u5ea6\u5b66\u4e60\u4e2d\u7684Attention\u603b\u7ed3<\/a><\/a><\/p>\n\n\n\n\n\n<h2 class=\"has-light-pink-background-color has-background\">github\u5730\u5740\uff1a<\/h2>\n\n\n\n<p class=\"has-bright-blue-background-color has-background\"><a href=\"https:\/\/github.com\/xmu-xiaoma666\/External-Attention-pytorch\">https:\/\/github.com\/xmu-xiaoma666\/External-Attention-pytorch<\/a><\/p>\n\n\n\n<h2>1. External Attention<\/h2>\n\n\n\n<h3>1.1. \u5f15\u7528<\/h3>\n\n\n\n<p>Beyond Self-attention: External Attention using Two Linear Layers for Visual Tasks.&#8212;arXiv 2021.05.05<\/p>\n\n\n\n<p>\u8bba\u6587\u5730\u5740\uff1ahttps:\/\/arxiv.org\/abs\/2105.02358<\/p>\n\n\n\n<h3>1.2. \u6a21\u578b\u7ed3\u6784<\/h3>\n\n\n\n<figure class=\"wp-block-image\"><img src=\"https:\/\/mmbiz.qpic.cn\/mmbiz_png\/SdQCib1UzF3sd5NLbGfy6Pmeo1j3WtooBvOGsqiaGDIaxSxfb1MUcvT5fw7qZMuHicjTccJFLXDbj3qYPUGhzJFXw\/640?wx_fmt=png&amp;wxfrom=5&amp;wx_lazy=1&amp;wx_co=1\" alt=\"\u56fe\u7247\"\/><\/figure>\n\n\n\n<h3>1.3. 
\u7b80\u4ecb<\/h3>\n\n\n\n<p>\u8fd9\u662f\u4e94\u6708\u4efd\u5728arXiv\u4e0a\u7684\u4e00\u7bc7\u6587\u7ae0\uff0c\u4e3b\u8981\u89e3\u51b3\u7684Self-Attention(SA)\u7684\u4e24\u4e2a\u75db\u70b9\u95ee\u9898\uff1a\uff081\uff09O(n^2)\u7684\u8ba1\u7b97\u590d\u6742\u5ea6\uff1b(2)SA\u662f\u5728\u540c\u4e00\u4e2a\u6837\u672c\u4e0a\u6839\u636e\u4e0d\u540c\u4f4d\u7f6e\u8ba1\u7b97Attention\uff0c\u5ffd\u7565\u4e86\u4e0d\u540c\u6837\u672c\u4e4b\u95f4\u7684\u8054\u7cfb\u3002\u56e0\u6b64\uff0c\u672c\u6587\u91c7\u7528\u4e86\u4e24\u4e2a\u4e32\u8054\u7684MLP\u7ed3\u6784\u4f5c\u4e3amemory units\uff0c\u4f7f\u5f97\u8ba1\u7b97\u590d\u6742\u5ea6\u964d\u4f4e\u5230\u4e86O(n)\uff1b\u6b64\u5916\uff0c\u8fd9\u4e24\u4e2amemory units\u662f\u57fa\u4e8e\u5168\u90e8\u7684\u8bad\u7ec3\u6570\u636e\u5b66\u4e60\u7684\uff0c\u56e0\u6b64\u4e5f\u9690\u5f0f\u7684\u8003\u8651\u4e86\u4e0d\u540c\u6837\u672c\u4e4b\u95f4\u7684\u8054\u7cfb\u3002<\/p>\n\n\n\n<h3>1.4. \u4f7f\u7528\u65b9\u6cd5<\/h3>\n\n\n\n<pre class=\"wp-block-code\"><code>from&nbsp;attention.ExternalAttention&nbsp;import&nbsp;ExternalAttention<br>import&nbsp;torch<br><br><br>input=torch.randn(50,49,512)<br>ea&nbsp;=&nbsp;ExternalAttention(d_model=512,S=8)<br>output=ea(input)<br>print(output.shape)<\/code><\/pre>\n\n\n\n<h2>2. Self Attention<\/h2>\n\n\n\n<h3>2.1. \u5f15\u7528<\/h3>\n\n\n\n<p>Attention Is All You Need&#8212;NeurIPS2017<\/p>\n\n\n\n<p>\u8bba\u6587\u5730\u5740\uff1ahttps:\/\/arxiv.org\/abs\/1706.03762<\/p>\n\n\n\n<h3>2.2. \u6a21\u578b\u7ed3\u6784<\/h3>\n\n\n\n<h3>2.3. 
\u7b80\u4ecb<\/h3>\n\n\n\n<p>       \u8fd9\u662fGoogle\u5728NeurIPS2017\u53d1\u8868\u7684\u4e00\u7bc7\u6587\u7ae0\uff0c\u5728CV\u3001NLP\u3001\u591a\u6a21\u6001\u7b49\u5404\u4e2a\u9886\u57df\u90fd\u6709\u5f88\u5927\u7684\u5f71\u54cd\u529b\uff0c\u76ee\u524d\u5f15\u7528\u91cf\u5df2\u7ecf2.2w+\u3002Transformer\u4e2d\u63d0\u51fa\u7684Self-Attention\u662fAttention\u7684\u4e00\u79cd\uff0c\u7528\u4e8e\u8ba1\u7b97\u7279\u5f81\u4e2d\u4e0d\u540c\u4f4d\u7f6e\u4e4b\u95f4\u7684\u6743\u91cd\uff0c\u4ece\u800c\u8fbe\u5230\u66f4\u65b0\u7279\u5f81\u7684\u6548\u679c\u3002\u9996\u5148\u5c06input feature\u901a\u8fc7FC\u6620\u5c04\u6210Q\u3001K\u3001V\u4e09\u4e2a\u7279\u5f81\uff0c\u7136\u540e\u5c06Q\u548cK\u8fdb\u884c\u70b9\u4e58\u7684\u5f97\u5230attention map\uff0c\u518d\u5c06attention map\u4e0eV\u505a\u70b9\u4e58\u5f97\u5230\u52a0\u6743\u540e\u7684\u7279\u5f81\u3002\u6700\u540e\u901a\u8fc7FC\u8fdb\u884c\u7279\u5f81\u7684\u6620\u5c04\uff0c\u5f97\u5230\u4e00\u4e2a\u65b0\u7684\u7279\u5f81\u3002\uff08\u5173\u4e8eTransformer\u548cSelf-Attention\u76ee\u524d\u7f51\u4e0a\u6709\u8bb8\u591a\u975e\u5e38\u597d\u7684\u8bb2\u89e3\uff0c\u8fd9\u91cc\u5c31\u4e0d\u505a\u8be6\u7ec6\u7684\u4ecb\u7ecd\u4e86\uff09<\/p>\n\n\n\n<h3>2.4. \u4f7f\u7528\u65b9\u6cd5<\/h3>\n\n\n\n<pre class=\"wp-block-code\"><code>from&nbsp;attention.SelfAttention&nbsp;import&nbsp;ScaledDotProductAttention<br>import&nbsp;torch<br><br>input=torch.randn(50,49,512)<br>sa&nbsp;=&nbsp;ScaledDotProductAttention(d_model=512,&nbsp;d_k=512,&nbsp;d_v=512,&nbsp;h=8)<br>output=sa(input,input,input)<br>print(output.shape)<\/code><\/pre>\n\n\n\n<h2>3. Squeeze-and-Excitation(SE) Attention<\/h2>\n\n\n\n<h3>3.1. \u5f15\u7528<\/h3>\n\n\n\n<p>Squeeze-and-Excitation Networks&#8212;CVPR2018<\/p>\n\n\n\n<p>\u8bba\u6587\u5730\u5740\uff1ahttps:\/\/arxiv.org\/abs\/1709.01507<\/p>\n\n\n\n<h3>3.2. 
\u6a21\u578b\u7ed3\u6784<\/h3>\n\n\n\n<figure class=\"wp-block-image\"><img src=\"https:\/\/mmbiz.qpic.cn\/mmbiz_png\/SdQCib1UzF3sd5NLbGfy6Pmeo1j3WtooBX4uBichfCgj1DibQYWKUIADJ2yMwa3KrSYpFXRRwaicxdUBH5ZFVJg5wA\/640?wx_fmt=png&amp;wxfrom=5&amp;wx_lazy=1&amp;wx_co=1\" alt=\"\u56fe\u7247\"\/><\/figure>\n\n\n\n<h3>3.3. \u7b80\u4ecb<\/h3>\n\n\n\n<p>\u8fd9\u662fCVPR2018\u7684\u4e00\u7bc7\u6587\u7ae0\uff0c\u540c\u6837\u975e\u5e38\u5177\u6709\u5f71\u54cd\u529b\uff0c\u76ee\u524d\u5f15\u7528\u91cf7k+\u3002\u672c\u6587\u662f\u505a\u901a\u9053\u6ce8\u610f\u529b\u7684\uff0c\u56e0\u5176\u7b80\u5355\u7684\u7ed3\u6784\u548c\u6709\u6548\u6027\uff0c\u5c06\u901a\u9053\u6ce8\u610f\u529b\u6380\u8d77\u4e86\u4e00\u6ce2\u5c0f\u9ad8\u6f6e\u3002\u5927\u9053\u81f3\u7b80\uff0c\u8fd9\u7bc7\u6587\u7ae0\u7684\u601d\u60f3\u53ef\u4ee5\u8bf4\u975e\u5e38\u7b80\u5355\uff0c\u9996\u5148\u5c06spatial\u7ef4\u5ea6\u8fdb\u884cAdaptiveAvgPool\uff0c\u7136\u540e\u901a\u8fc7\u4e24\u4e2aFC\u5b66\u4e60\u5230\u901a\u9053\u6ce8\u610f\u529b\uff0c\u5e76\u7528Sigmoid\u8fdb\u884c\u5f52\u4e00\u5316\u5f97\u5230Channel Attention Map,\u6700\u540e\u5c06Channel Attention Map\u4e0e\u539f\u7279\u5f81\u76f8\u4e58\uff0c\u5c31\u5f97\u5230\u4e86\u52a0\u6743\u540e\u7684\u7279\u5f81\u3002<\/p>\n\n\n\n<h3>3.4. \u4f7f\u7528\u65b9\u6cd5<\/h3>\n\n\n\n<pre class=\"wp-block-code\"><code>from&nbsp;attention.SEAttention&nbsp;import&nbsp;SEAttention<br>import&nbsp;torch<br><br>input=torch.randn(50,512,7,7)<br>se&nbsp;=&nbsp;SEAttention(channel=512,reduction=8)<br>output=se(input)<br>print(output.shape)<\/code><\/pre>\n\n\n\n<h2>4. Selective Kernel(SK) Attention<\/h2>\n\n\n\n<h3>4.1. \u5f15\u7528<\/h3>\n\n\n\n<p>Selective Kernel Networks&#8212;CVPR2019<\/p>\n\n\n\n<p>\u8bba\u6587\u5730\u5740\uff1ahttps:\/\/arxiv.org\/pdf\/1903.06586.pdf<\/p>\n\n\n\n<h3>4.2. 
\u6a21\u578b\u7ed3\u6784<\/h3>\n\n\n\n<figure class=\"wp-block-image\"><img src=\"https:\/\/mmbiz.qpic.cn\/mmbiz_png\/SdQCib1UzF3sd5NLbGfy6Pmeo1j3WtooBMllMRhezia9SwIkM1KrDKPWIs1ia7kNHWIRpdy4ia3KqOMdqVnytwgEVg\/640?wx_fmt=png&amp;wxfrom=5&amp;wx_lazy=1&amp;wx_co=1\" alt=\"\u56fe\u7247\"\/><\/figure>\n\n\n\n<h3>4.3. \u7b80\u4ecb<\/h3>\n\n\n\n<p>\u8fd9\u662fCVPR2019\u7684\u4e00\u7bc7\u6587\u7ae0\uff0c\u81f4\u656c\u4e86SENet\u7684\u601d\u60f3\u3002\u5728\u4f20\u7edf\u7684CNN\u4e2d\u6bcf\u4e00\u4e2a\u5377\u79ef\u5c42\u90fd\u662f\u7528\u76f8\u540c\u5927\u5c0f\u7684\u5377\u79ef\u6838\uff0c\u9650\u5236\u4e86\u6a21\u578b\u7684\u8868\u8fbe\u80fd\u529b\uff1b\u800cInception\u8fd9\u79cd\u201c\u66f4\u5bbd\u201d\u7684\u6a21\u578b\u7ed3\u6784\u4e5f\u9a8c\u8bc1\u4e86\uff0c\u7528\u591a\u4e2a\u4e0d\u540c\u7684\u5377\u79ef\u6838\u8fdb\u884c\u5b66\u4e60\u786e\u5b9e\u53ef\u4ee5\u63d0\u5347\u6a21\u578b\u7684\u8868\u8fbe\u80fd\u529b\u3002\u4f5c\u8005\u501f\u9274\u4e86SENet\u7684\u601d\u60f3\uff0c\u901a\u8fc7\u52a8\u6001\u8ba1\u7b97\u6bcf\u4e2a\u5377\u79ef\u6838\u5f97\u5230\u901a\u9053\u7684\u6743\u91cd\uff0c\u52a8\u6001\u7684\u5c06\u5404\u4e2a\u5377\u79ef\u6838\u7684\u7ed3\u679c\u8fdb\u884c\u878d\u5408\u3002<\/p>\n\n\n\n<p>\u4e2a\u4eba\u8ba4\u4e3a\uff0c\u4e4b\u6240\u4ee5\u8bf4\u8fd9\u7bc7\u6587\u7ae0\u4e5f\u80fd\u591f\u79f0\u4e4b\u4e3alightweight\uff0c\u662f\u56e0\u4e3a\u5bf9\u4e0d\u540ckernel\u7684\u7279\u5f81\u8fdb\u884c\u901a\u9053\u6ce8\u610f\u529b\u7684\u65f6\u5019\u662f\u53c2\u6570\u5171\u4eab\u7684\uff08i.e. 
\u56e0\u4e3a\u5728\u505aAttention\u4e4b\u524d\uff0c\u9996\u5148\u5c06\u7279\u5f81\u8fdb\u884c\u4e86\u878d\u5408\uff0c\u6240\u4ee5\u4e0d\u540c\u5377\u79ef\u6838\u7684\u7ed3\u679c\u5171\u4eab\u4e00\u4e2aSE\u6a21\u5757\u7684\u53c2\u6570\uff09\u3002<\/p>\n\n\n\n<p>\u672c\u6587\u7684\u65b9\u6cd5\u5206\u4e3a\u4e09\u4e2a\u90e8\u5206\uff1aSplit,Fuse,Select\u3002Split\u5c31\u662f\u4e00\u4e2amulti-branch\u7684\u64cd\u4f5c\uff0c\u7528\u4e0d\u540c\u7684\u5377\u79ef\u6838\u8fdb\u884c\u5377\u79ef\u5f97\u5230\u4e0d\u540c\u7684\u7279\u5f81\uff1bFuse\u90e8\u5206\u5c31\u662f\u7528SE\u7684\u7ed3\u6784\u83b7\u53d6\u901a\u9053\u6ce8\u610f\u529b\u7684\u77e9\u9635(N\u4e2a\u5377\u79ef\u6838\u5c31\u53ef\u4ee5\u5f97\u5230N\u4e2a\u6ce8\u610f\u529b\u77e9\u9635\uff0c\u8fd9\u6b65\u64cd\u4f5c\u5bf9\u6240\u6709\u7684\u7279\u5f81\u53c2\u6570\u5171\u4eab)\uff0c\u8fd9\u6837\u5c31\u53ef\u4ee5\u5f97\u5230\u4e0d\u540ckernel\u7ecf\u8fc7SE\u4e4b\u540e\u7684\u7279\u5f81\uff1bSelect\u64cd\u4f5c\u5c31\u662f\u5c06\u8fd9\u51e0\u4e2a\u7279\u5f81\u8fdb\u884c\u76f8\u52a0\u3002<\/p>\n\n\n\n<h3>4.4. \u4f7f\u7528\u65b9\u6cd5<\/h3>\n\n\n\n<pre class=\"wp-block-code\"><code>from&nbsp;attention.SKAttention&nbsp;import&nbsp;SKAttention<br>import&nbsp;torch<br><br>input=torch.randn(50,512,7,7)<br>se&nbsp;=&nbsp;SKAttention(channel=512,reduction=8)<br>output=se(input)<br>print(output.shape)<\/code><\/pre>\n\n\n\n<h2>5. CBAM Attention<\/h2>\n\n\n\n<h3>5.1. \u5f15\u7528<\/h3>\n\n\n\n<p>CBAM: Convolutional Block Attention Module&#8212;ECCV2018<\/p>\n\n\n\n<p>\u8bba\u6587\u5730\u5740\uff1ahttps:\/\/openaccess.thecvf.com\/content_ECCV_2018\/papers\/Sanghyun_Woo_Convolutional_Block_Attention_ECCV_2018_paper.pdf<\/p>\n\n\n\n<h3>5.2. 
\u6a21\u578b\u7ed3\u6784<\/h3>\n\n\n\n<figure class=\"wp-block-image\"><img src=\"https:\/\/mmbiz.qpic.cn\/mmbiz_png\/SdQCib1UzF3sd5NLbGfy6Pmeo1j3WtooBR8X66maVHOwx5a2UibGDGpia7oTBobwibqPaJ9vUOhdykbxPzXCO5iaUicA\/640?wx_fmt=png&amp;wxfrom=5&amp;wx_lazy=1&amp;wx_co=1\" alt=\"\u56fe\u7247\"\/><\/figure>\n\n\n\n<figure class=\"wp-block-image\"><img src=\"https:\/\/mmbiz.qpic.cn\/mmbiz_png\/SdQCib1UzF3sd5NLbGfy6Pmeo1j3WtooB3J53rB7u6yZLC2BRNDCicz3qlibVTd3533TQRsbDjzD5MmQYlGKrhoww\/640?wx_fmt=png&amp;wxfrom=5&amp;wx_lazy=1&amp;wx_co=1\" alt=\"\u56fe\u7247\"\/><\/figure>\n\n\n\n<h3>5.3. \u7b80\u4ecb<\/h3>\n\n\n\n<p>\u8fd9\u662fECCV2018\u7684\u4e00\u7bc7\u8bba\u6587\uff0c\u8fd9\u7bc7\u6587\u7ae0\u540c\u65f6\u4f7f\u7528\u4e86Channel Attention\u548cSpatial Attention\uff0c\u5c06\u4e24\u8005\u8fdb\u884c\u4e86\u4e32\u8054\uff08\u6587\u7ae0\u4e5f\u505a\u4e86\u5e76\u8054\u548c\u4e24\u79cd\u4e32\u8054\u65b9\u5f0f\u7684\u6d88\u878d\u5b9e\u9a8c\uff09\u3002<\/p>\n\n\n\n<p>Channel Attention\u65b9\u9762\uff0c\u5927\u81f4\u7ed3\u6784\u8fd8\u662f\u548cSE\u76f8\u4f3c\uff0c\u4e0d\u8fc7\u4f5c\u8005\u63d0\u51faAvgPool\u548cMaxPool\u6709\u4e0d\u540c\u7684\u8868\u793a\u6548\u679c\uff0c\u6240\u4ee5\u4f5c\u8005\u5bf9\u539f\u6765\u7684\u7279\u5f81\u5728Spatial\u7ef4\u5ea6\u5206\u522b\u8fdb\u884c\u4e86AvgPool\u548cMaxPool\uff0c\u7136\u540e\u7528SE\u7684\u7ed3\u6784\u63d0\u53d6channel attention\uff0c\u6ce8\u610f\u8fd9\u91cc\u662f\u53c2\u6570\u5171\u4eab\u7684\uff0c\u7136\u540e\u5c06\u4e24\u4e2a\u7279\u5f81\u76f8\u52a0\u540e\u505a\u5f52\u4e00\u5316\uff0c\u5c31\u5f97\u5230\u4e86\u6ce8\u610f\u529b\u77e9\u9635\u3002<\/p>\n\n\n\n<p>Spatial Attention\u548cChannel Attention\u7c7b\u4f3c\uff0c\u5148\u5728channel\u7ef4\u5ea6\u8fdb\u884c\u4e24\u79cdpool\u540e\uff0c\u5c06\u4e24\u4e2a\u7279\u5f81\u8fdb\u884c\u62fc\u63a5\uff0c\u7136\u540e\u75287&#215;7\u7684\u5377\u79ef\u6765\u63d0\u53d6Spatial 
Attention\uff08\u4e4b\u6240\u4ee5\u75287&#215;7\u662f\u56e0\u4e3a\u63d0\u53d6\u7684\u662f\u7a7a\u95f4\u6ce8\u610f\u529b\uff0c\u6240\u4ee5\u7528\u7684\u5377\u79ef\u6838\u5fc5\u987b\u8db3\u591f\u5927\uff09\u3002\u7136\u540e\u505a\u4e00\u6b21\u5f52\u4e00\u5316\uff0c\u5c31\u5f97\u5230\u4e86\u7a7a\u95f4\u7684\u6ce8\u610f\u529b\u77e9\u9635\u3002<\/p>\n\n\n\n<h3>5.4. \u4f7f\u7528\u65b9\u6cd5<\/h3>\n\n\n\n<pre class=\"wp-block-code\"><code>from&nbsp;attention.CBAM&nbsp;import&nbsp;CBAMBlock<br>import&nbsp;torch<br><br>input=torch.randn(50,512,7,7)<br>kernel_size=input.shape&#091;2]<br>cbam&nbsp;=&nbsp;CBAMBlock(channel=512,reduction=16,kernel_size=kernel_size)<br>output=cbam(input)<br>print(output.shape)<\/code><\/pre>\n\n\n\n<h2>6. BAM Attention<\/h2>\n\n\n\n<h3>6.1. \u5f15\u7528<\/h3>\n\n\n\n<p>BAM: Bottleneck Attention Module&#8212;BMVC2018<\/p>\n\n\n\n<p>\u8bba\u6587\u5730\u5740\uff1ahttps:\/\/arxiv.org\/pdf\/1807.06514.pdf<\/p>\n\n\n\n<h3>6.2. \u6a21\u578b\u7ed3\u6784<\/h3>\n\n\n\n<figure class=\"wp-block-image\"><img src=\"https:\/\/mmbiz.qpic.cn\/mmbiz_png\/SdQCib1UzF3sd5NLbGfy6Pmeo1j3WtooBZuYh1mqcibzDoYRWN6tGCN4wydDBf9sfRlHkysF2ibArlomOMt3alhNA\/640?wx_fmt=png&amp;wxfrom=5&amp;wx_lazy=1&amp;wx_co=1\" alt=\"\u56fe\u7247\"\/><\/figure>\n\n\n\n<h3>6.3. 
\u7b80\u4ecb<\/h3>\n\n\n\n<p>\u8fd9\u662fCBAM\u540c\u4f5c\u8005\u540c\u65f6\u671f\u7684\u5de5\u4f5c\uff0c\u5de5\u4f5c\u4e0eCBAM\u975e\u5e38\u76f8\u4f3c\uff0c\u4e5f\u662f\u53cc\u91cdAttention\uff0c\u4e0d\u540c\u7684\u662fCBAM\u662f\u5c06\u4e24\u4e2aattention\u7684\u7ed3\u679c\u4e32\u8054\uff1b\u800cBAM\u662f\u76f4\u63a5\u5c06\u4e24\u4e2aattention\u77e9\u9635\u8fdb\u884c\u76f8\u52a0\u3002<\/p>\n\n\n\n<p>Channel Attention\u65b9\u9762\uff0c\u4e0eSE\u7684\u7ed3\u6784\u57fa\u672c\u4e00\u6837\u3002Spatial Attention\u65b9\u9762\uff0c\u8fd8\u662f\u5728\u901a\u9053\u7ef4\u5ea6\u8fdb\u884cpool\uff0c\u7136\u540e\u7528\u4e86\u4e24\u6b213&#215;3\u7684\u7a7a\u6d1e\u5377\u79ef\uff0c\u6700\u540e\u5c06\u7528\u4e00\u6b211&#215;1\u7684\u5377\u79ef\u5f97\u5230Spatial Attention\u7684\u77e9\u9635\u3002<\/p>\n\n\n\n<p>\u6700\u540eChannel Attention\u548cSpatial Attention\u77e9\u9635\u8fdb\u884c\u76f8\u52a0\uff08\u8fd9\u91cc\u7528\u5230\u4e86\u5e7f\u64ad\u673a\u5236\uff09\uff0c\u5e76\u8fdb\u884c\u5f52\u4e00\u5316\uff0c\u8fd9\u6837\u4e00\u6765\uff0c\u5c31\u5f97\u5230\u4e86\u7a7a\u95f4\u548c\u901a\u9053\u7ed3\u5408\u7684attention\u77e9\u9635\u3002<\/p>\n\n\n\n<h3>6.4.\u4f7f\u7528\u65b9\u6cd5<\/h3>\n\n\n\n<pre class=\"wp-block-code\"><code>from&nbsp;attention.BAM&nbsp;import&nbsp;BAMBlock<br>import&nbsp;torch<br><br>input=torch.randn(50,512,7,7)<br>bam&nbsp;=&nbsp;BAMBlock(channel=512,reduction=16,dia_val=2)<br>output=bam(input)<br>print(output.shape)<\/code><\/pre>\n\n\n\n<h2>7. ECA Attention<\/h2>\n\n\n\n<h3>7.1. \u5f15\u7528<\/h3>\n\n\n\n<p>ECA-Net: Efficient Channel Attention for Deep Convolutional Neural Networks&#8212;CVPR2020<\/p>\n\n\n\n<p>\u8bba\u6587\u5730\u5740\uff1ahttps:\/\/arxiv.org\/pdf\/1910.03151.pdf<\/p>\n\n\n\n<h3>7.2. 
\u6a21\u578b\u7ed3\u6784<\/h3>\n\n\n\n<figure class=\"wp-block-image\"><img src=\"https:\/\/mmbiz.qpic.cn\/mmbiz_png\/SdQCib1UzF3sd5NLbGfy6Pmeo1j3WtooBaHJDLl9K948HO0u5r99cEQtIMxjnXcf2fHicdmGpWfjvHke6dicTT3jA\/640?wx_fmt=png&amp;wxfrom=5&amp;wx_lazy=1&amp;wx_co=1\" alt=\"\u56fe\u7247\"\/><\/figure>\n\n\n\n<h3>7.3. \u7b80\u4ecb<\/h3>\n\n\n\n<p>\u8fd9\u662fCVPR2020\u7684\u4e00\u7bc7\u6587\u7ae0\u3002<\/p>\n\n\n\n<p>\u5982\u4e0a\u56fe\u6240\u793a\uff0cSE\u5b9e\u73b0\u901a\u9053\u6ce8\u610f\u529b\u662f\u4f7f\u7528\u4e24\u4e2a\u5168\u8fde\u63a5\u5c42\uff0c\u800cECA\u53ea\u9700\u8981\u4e00\u4e2a\u4e00\u7ef4\u5377\u79ef\u3002\u4f5c\u8005\u8fd9\u4e48\u505a\u7684\u539f\u56e0\u4e00\u65b9\u9762\u662f\u8ba4\u4e3a\u8ba1\u7b97\u6240\u6709\u901a\u9053\u4e24\u4e24\u4e4b\u95f4\u7684\u6ce8\u610f\u529b\u662f\u6ca1\u6709\u5fc5\u8981\u7684\uff0c\u53e6\u4e00\u65b9\u9762\u662f\u7528\u4e24\u4e2a\u5168\u8fde\u63a5\u5c42\u786e\u5b9e\u5f15\u5165\u4e86\u592a\u591a\u7684\u53c2\u6570\u548c\u8ba1\u7b97\u91cf\u3002<\/p>\n\n\n\n<p>\u56e0\u6b64\u4f5c\u8005\u8fdb\u884c\u4e86AvgPool\u4e4b\u540e\uff0c\u53ea\u662f\u4f7f\u7528\u4e86\u4e00\u4e2a\u611f\u53d7\u91ce\u4e3ak\u7684\u4e00\u7ef4\u5377\u79ef\uff08\u76f8\u5f53\u4e8e\u53ea\u8ba1\u7b97\u4e0e\u76f8\u90bbk\u4e2a\u901a\u9053\u7684\u6ce8\u610f\u529b\uff09\uff0c\u8fd9\u6837\u505a\u5c31\u5927\u5927\u7684\u51cf\u5c11\u4e86\u53c2\u6570\u548c\u8ba1\u7b97\u91cf\u3002(i.e.\u76f8\u5f53\u4e8eSE\u662f\u4e00\u4e2aglobal\u7684\u6ce8\u610f\u529b\uff0c\u800cECA\u662f\u4e00\u4e2alocal\u7684\u6ce8\u610f\u529b)\u3002<\/p>\n\n\n\n<h3>7.4. \u4f7f\u7528\u65b9\u6cd5\uff1a<\/h3>\n\n\n\n<pre class=\"wp-block-code\"><code>from&nbsp;attention.ECAAttention&nbsp;import&nbsp;ECAAttention<br>import&nbsp;torch<br><br>input=torch.randn(50,512,7,7)<br>eca&nbsp;=&nbsp;ECAAttention(kernel_size=3)<br>output=eca(input)<br>print(output.shape)<\/code><\/pre>\n\n\n\n<h2>8. DANet Attention<\/h2>\n\n\n\n<h3>8.1. 
\u5f15\u7528<\/h3>\n\n\n\n<p>Dual Attention Network for Scene Segmentation&#8212;CVPR2019<\/p>\n\n\n\n<p>\u8bba\u6587\u5730\u5740\uff1ahttps:\/\/arxiv.org\/pdf\/1809.02983.pdf<\/p>\n\n\n\n<h3>8.2. \u6a21\u578b\u7ed3\u6784<\/h3>\n\n\n\n<figure class=\"wp-block-image\"><img src=\"https:\/\/mmbiz.qpic.cn\/mmbiz_jpg\/SdQCib1UzF3sd5NLbGfy6Pmeo1j3WtooBab4uU8QGqiabSiaId1sScKCE5siawdfrGMU2fWKyc0J0icCUibCCxjebzxg\/640?wx_fmt=jpeg&amp;wxfrom=5&amp;wx_lazy=1&amp;wx_co=1\" alt=\"\u56fe\u7247\"\/><\/figure>\n\n\n\n<figure class=\"wp-block-image\"><img src=\"https:\/\/mmbiz.qpic.cn\/mmbiz_png\/SdQCib1UzF3sd5NLbGfy6Pmeo1j3WtooBkpTtwnsyIVUg3KpNHBXNdtP8I6IdAfWicxkgzCSy9gqtHXZyicyUyLlg\/640?wx_fmt=png&amp;wxfrom=5&amp;wx_lazy=1&amp;wx_co=1\" alt=\"\u56fe\u7247\"\/><figcaption>\u56fe\u793a, \u793a\u610f\u56fe &nbsp;\u63cf\u8ff0\u5df2\u81ea\u52a8\u751f\u6210<\/figcaption><\/figure>\n\n\n\n<h3>8.3. \u7b80\u4ecb<\/h3>\n\n\n\n<p>\u8fd9\u662fCVPR2019\u7684\u6587\u7ae0\uff0c\u601d\u60f3\u4e0a\u975e\u5e38\u7b80\u5355\uff0c\u5c31\u662f\u5c06self-attention\u7528\u5230\u573a\u666f\u5206\u5272\u7684\u4efb\u52a1\u4e2d\uff0c\u4e0d\u540c\u7684\u662fself-attention\u662f\u5173\u6ce8\u6bcf\u4e2aposition\u4e4b\u95f4\u7684\u6ce8\u610f\u529b\uff0c\u800c\u672c\u6587\u5c06self-attention\u505a\u4e86\u4e00\u4e2a\u62d3\u5c55\uff0c\u8fd8\u505a\u4e86\u4e00\u4e2a\u901a\u9053\u6ce8\u610f\u529b\u7684\u5206\u652f\uff0c\u64cd\u4f5c\u4e0a\u548cself-attention\u4e00\u6837\uff0c\u4e0d\u540c\u7684\u901a\u9053attention\u4e2d\u628a\u751f\u6210Q\uff0cK\uff0cV\u7684\u4e09\u4e2aLinear\u53bb\u6389\u4e86\u3002\u6700\u540e\u5c06\u4e24\u4e2aattention\u4e4b\u540e\u7684\u7279\u5f81\u8fdb\u884celement-wise sum\u3002<\/p>\n\n\n\n<h3>8.4. \u4f7f\u7528\u65b9\u6cd5<\/h3>\n\n\n\n<pre class=\"wp-block-code\"><code>from&nbsp;attention.DANet&nbsp;import&nbsp;DAModule<br>import&nbsp;torch<br><br>input=torch.randn(50,512,7,7)<br>danet=DAModule(d_model=512,kernel_size=3,H=7,W=7)<br>print(danet(input).shape)<\/code><\/pre>\n\n\n\n<h2>9. 
Pyramid Split Attention(PSA)<\/h2>\n\n\n\n<h3>9.1. \u5f15\u7528<\/h3>\n\n\n\n<p>EPSANet: An Efficient Pyramid Split Attention Block on Convolutional Neural Network&#8212;arXiv 2021.05.30<\/p>\n\n\n\n<p>\u8bba\u6587\u5730\u5740\uff1ahttps:\/\/arxiv.org\/pdf\/2105.14447.pdf<\/p>\n\n\n\n<h3>9.2. \u6a21\u578b\u7ed3\u6784<\/h3>\n\n\n\n<figure class=\"wp-block-image\"><img src=\"https:\/\/mmbiz.qpic.cn\/mmbiz_png\/SdQCib1UzF3sd5NLbGfy6Pmeo1j3WtooBYDdES96Q5mhBZAztCGFA5Q1Gjiafr2Bd8G1kU4fxvLzIv550Y3BG6qQ\/640?wx_fmt=png&amp;wxfrom=5&amp;wx_lazy=1&amp;wx_co=1\" alt=\"\u56fe\u7247\"\/><\/figure>\n\n\n\n<figure class=\"wp-block-image\"><img src=\"https:\/\/mmbiz.qpic.cn\/mmbiz_png\/SdQCib1UzF3sd5NLbGfy6Pmeo1j3WtooBI1Gd5xI0uYdTBoGWoDjhgbOt8U0bL0pxtIiaCwPyH8aQMsYl96dNlUA\/640?wx_fmt=png&amp;wxfrom=5&amp;wx_lazy=1&amp;wx_co=1\" alt=\"\u56fe\u7247\"\/><\/figure>\n\n\n\n<h3>9.3. \u7b80\u4ecb<\/h3>\n\n\n\n<p>\u8fd9\u662f\u6df1\u59275\u670830\u65e5\u5728arXiv\u4e0a\u4e0a\u4f20\u7684\u4e00\u7bc7\u6587\u7ae0\uff0c\u672c\u6587\u7684\u76ee\u7684\u662f\u5982\u4f55\u83b7\u53d6\u5e76\u63a2\u7d22\u4e0d\u540c\u5c3a\u5ea6\u7684\u7a7a\u95f4\u4fe1\u606f\u6765\u4e30\u5bcc\u7279\u5f81\u7a7a\u95f4\u3002\u7f51\u7edc\u7ed3\u6784\u76f8\u5bf9\u6765\u8bf4\u4e5f\u6bd4\u8f83\u7b80\u5355\uff0c\u4e3b\u8981\u5206\u6210\u56db\u6b65\uff0c\u7b2c\u4e00\u6b65\uff0c\u5c06\u539f\u6765\u7684feature\u6839\u636e\u901a\u9053\u5206\u6210n\u7ec4\u7136\u540e\u5bf9\u4e0d\u540c\u7684\u7ec4\u8fdb\u884c\u4e0d\u540c\u5c3a\u5ea6\u7684\u5377\u79ef\uff0c\u5f97\u5230\u65b0\u7684\u7279\u5f81W1\uff1b\u7b2c\u4e8c\u6b65\uff0c\u7528SE\u5728\u539f\u6765\u7684\u7279\u5f81\u4e0a\u8fdb\u884cSE\uff0c\u4ece\u800c\u83b7\u5f97\u4e0d\u540c\u7684attention\uff1b\u7b2c\u4e09\u6b65\uff0c\u5bf9\u4e0d\u540c\u7ec4\u8fdb\u884cSOFTMAX\uff1b\u7b2c\u56db\u6b65\uff0c\u5c06\u83b7\u5f97attention\u4e0e\u539f\u6765\u7684\u7279\u5f81W1\u76f8\u4e58\u3002<\/p>\n\n\n\n<h3>9.4. 
\u4f7f\u7528\u65b9\u6cd5<\/h3>\n\n\n\n<pre class=\"wp-block-code\"><code>from&nbsp;attention.PSA&nbsp;import&nbsp;PSAimport&nbsp;torchinput=torch.randn(50,512,7,7)psa&nbsp;=&nbsp;PSA(channel=512,reduction=8)output=psa(input)print(output.shape)<\/code><\/pre>\n\n\n\n<h2>10. Efficient Multi-Head Self-Attention(EMSA)<\/h2>\n\n\n\n<h3>10.1. \u5f15\u7528<\/h3>\n\n\n\n<p>ResT: An Efficient Transformer for Visual Recognition&#8212;arXiv 2021.05.28<\/p>\n\n\n\n<p>\u8bba\u6587\u5730\u5740\uff1ahttps:\/\/arxiv.org\/abs\/2105.13677<\/p>\n\n\n\n<h3>10.2. \u6a21\u578b\u7ed3\u6784<\/h3>\n\n\n\n<figure class=\"wp-block-image\"><img src=\"https:\/\/mmbiz.qpic.cn\/mmbiz_png\/SdQCib1UzF3sd5NLbGfy6Pmeo1j3WtooBAphHqgwWPnoSIeUh1hG6zs0xXCWcnWYsFWTLAbNmAeJ89bj4herg1w\/640?wx_fmt=png&amp;wxfrom=5&amp;wx_lazy=1&amp;wx_co=1\" alt=\"\u56fe\u7247\"\/><\/figure>\n\n\n\n<h3>10.3. \u7b80\u4ecb<\/h3>\n\n\n\n<p>\u8fd9\u662f\u5357\u59275\u670828\u65e5\u5728arXiv\u4e0a\u4e0a\u4f20\u7684\u4e00\u7bc7\u6587\u7ae0\u3002\u672c\u6587\u89e3\u51b3\u7684\u4e3b\u8981\u662fSA\u7684\u4e24\u4e2a\u75db\u70b9\u95ee\u9898\uff1a\uff081\uff09Self-Attention\u7684\u8ba1\u7b97\u590d\u6742\u5ea6\u548cn\uff08n\u4e3a\u7a7a\u95f4\u7ef4\u5ea6\u5927\u5c0f\uff09\u5448\u5e73\u65b9\u5173\u7cfb\uff1b\uff082\uff09\u6bcf\u4e2ahead\u53ea\u6709q,k,v\u7684\u90e8\u5206\u4fe1\u606f\uff0c\u5982\u679cq,k,v\u7684\u7ef4\u5ea6\u592a\u5c0f\uff0c\u90a3\u4e48\u5c31\u4f1a\u5bfc\u81f4\u83b7\u53d6\u4e0d\u5230\u8fde\u7eed\u7684\u4fe1\u606f\uff0c\u4ece\u800c\u5bfc\u81f4\u6027\u80fd\u635f\u5931\u3002\u8fd9\u7bc7\u6587\u7ae0\u7ed9\u51fa\u7684\u601d\u8def\u4e5f\u975e\u5e38\u7b80\u5355\uff0c\u5728SA\u4e2d\uff0c\u5728FC\u4e4b\u524d\uff0c\u7528\u4e86\u4e00\u4e2a\u5377\u79ef\u6765\u964d\u4f4e\u4e86\u7a7a\u95f4\u7684\u7ef4\u5ea6\uff0c\u4ece\u800c\u5f97\u5230\u7a7a\u95f4\u7ef4\u5ea6\u4e0a\u66f4\u5c0f\u7684K\u548cV\u3002<\/p>\n\n\n\n<h3>10.4. 
\u4f7f\u7528\u65b9\u6cd5<\/h3>\n\n\n\n<pre class=\"wp-block-code\"><code>from&nbsp;attention.EMSA&nbsp;import&nbsp;EMSAimport&nbsp;torchfrom&nbsp;torch&nbsp;import&nbsp;nnfrom&nbsp;torch.nn&nbsp;import&nbsp;functional&nbsp;as&nbsp;Finput=torch.randn(50,64,512)emsa&nbsp;=&nbsp;EMSA(d_model=512,&nbsp;d_k=512,&nbsp;d_v=512,&nbsp;h=8,H=8,W=8,ratio=2,apply_transform=True)output=emsa(input,input,input)print(output.shape)<\/code><\/pre>\n","protected":false},"excerpt":{"rendered":"<p>\u8f6c\u8f7d\u81ea\u4f5c\u8005\uff1amayiwei1998\u672c\u6587\u8f6c\u8f7d\u81ea\uff1aGiantPandaCV\u539f\u6587\u94fe\u63a5\uff1a\u6df1\u5ea6\u5b66\u4e60\u4e2d\u7684Attention &hellip; <a href=\"http:\/\/139.9.1.231\/index.php\/2022\/05\/09\/attention\/\" class=\"more-link\">\u7ee7\u7eed\u9605\u8bfb<span class=\"screen-reader-text\">\u6df1\u5ea6\u5b66\u4e60\u4e2d\u7684 Attention \u673a\u5236\u603b\u7ed3\u4e0e\u4ee3\u7801\u5b9e\u73b0\uff082017-2021\u5e74\uff09<\/span><\/a><\/p>\n","protected":false},"author":1,"featured_media":0,"comment_status":"open","ping_status":"open","sticky":false,"template":"","format":"standard","meta":[],"categories":[4,12],"tags":[],"_links":{"self":[{"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/posts\/4245"}],"collection":[{"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/posts"}],"about":[{"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/types\/post"}],"author":[{"embeddable":true,"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/users\/1"}],"replies":[{"embeddable":true,"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/comments?post=4245"}],"version-history":[{"count":9,"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/posts\/4245\/revisions"}],"predecessor-version":[{"id":4468,"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/posts\/4245\/revisions\/4468"}],"wp:attachment":[{"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/media?parent=4245"}],"wp:term":[{"taxonomy":"category","embeddable":true,"h
ref":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/categories?post=4245"},{"taxonomy":"post_tag","embeddable":true,"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/tags?post=4245"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}