{"id":5388,"date":"2022-08-09T21:26:28","date_gmt":"2022-08-09T13:26:28","guid":{"rendered":"http:\/\/139.9.1.231\/?p=5388"},"modified":"2022-08-09T22:56:04","modified_gmt":"2022-08-09T14:56:04","slug":"few-shot-learning-fsl","status":"publish","type":"post","link":"http:\/\/139.9.1.231\/index.php\/2022\/08\/09\/few-shot-learning-fsl\/","title":{"rendered":"Few-Shot Learning (FSL): \u5c0f\u6837\u672c\u5b66\u4e60\u7b80\u4ecb\u53ca\u5176\u5e94\u7528"},"content":{"rendered":"\n<p class=\"has-light-pink-background-color has-background\">\u6458\u81ea: <a href=\"https:\/\/research.aimultiple.com\/few-shot-learning\/\" target=\"_blank\" rel=\"noreferrer noopener\">https:\/\/research.aimultiple.com\/few-shot-learning\/<\/a><\/p>\n\n\n\n<p class=\"has-light-pink-background-color has-background\">\u8bba\u6587 \uff1aA Survey on Few-Shot Learning\uff1a <a href=\"https:\/\/arxiv.org\/abs\/1904.05046\">https:\/\/arxiv.org\/abs\/1904.05046<\/a><\/p>\n\n\n\n<p class=\"has-light-pink-background-color has-background\">wss\u4ecb\u7ecd\u89c6\u9891\uff1a<a rel=\"noreferrer noopener\" href=\"https:\/\/www.youtube.com\/c\/ShusenWang\" target=\"_blank\">https:\/\/www.youtube.com\/c\/ShusenWang<\/a><\/p>\n\n\n\n<p class=\"has-light-pink-background-color has-background\">\u8bfe\u4ef6\uff1a<a href=\"https:\/\/github.com\/wangshusen\/DeepLearning\">https:\/\/github.com\/wangshusen\/DeepLearning<\/a><\/p>\n\n\n\n<p>&nbsp;      \u5982\u679c\u624b\u673a\u9700\u8981\u6210\u5343\u4e0a\u4e07\u5f20\u7167\u7247\u6765\u8bad\u7ec3\u624d\u80fd\u8fdb\u884c\u4eba\u8138\u8bc6\u522b\u89e3\u9501\uff0c\u8fd9\u662f\u5f88\u4e0d\u53cb\u597d\u7684\u3002\u5728\u673a\u5668\u5b66\u4e60\u5e94\u7528\u9886\u57df\uff0c<a rel=\"noreferrer noopener\" href=\"https:\/\/so.csdn.net\/so\/search?q=%E5%B0%8F%E6%A0%B7%E6%9C%AC&amp;spm=1001.2101.3001.7020\" target=\"_blank\">\u5c0f\u6837\u672c<\/a>\u5b66\u4e60(Few-shot 
Learning)\uff08\u5728\u521a\u521a\u63cf\u8ff0\u7684\u60c5\u51b5\u4e0b\u79f0\u4e3a\u5355\u6837\u672c\u5b66\u4e60(one-shot learning)\uff09\u662f\u4e00\u4e2a\u70ed\u95e8\u8bdd\u9898\uff0c\u5b83\u80fd\u591f\u57fa\u4e8e\u5c11\u91cf\u7684\u8bad\u7ec3\u6837\u672c\u53bb\u9884\u6d4b\u3002\u672c\u6587\u5c06\u8ba8\u8bba\u4ee5\u4e0b\u51e0\u4e2a\u65b9\u9762\uff1a<\/p>\n\n\n\n<ul><li>\u4ec0\u4e48\u662f\u5c11\u6837\u672c\u5b66\u4e60\uff08FSL\uff09\uff1f<\/li><li>\u5b83\u4e3a\u4ec0\u4e48\u5982\u6b64\u91cd\u8981\uff1f<\/li><li>\u5c11\u6837\u672c\u5b66\u4e60\u6709\u54ea\u4e9b\u5e94\u7528\uff1f<\/li><li>\u5b83\u662f\u5982\u4f55\u5de5\u4f5c\u7684\uff1f<\/li><li>\u5c11\u6837\u672c\u5b66\u4e60\u548c\u96f6\u6837\u672c\u5b66\u4e60\u6709\u4ec0\u4e48\u533a\u522b\uff1f<\/li><li>\u5c11\u6837\u672c\u5b66\u4e60\u6709\u54ea\u4e9b\u4e0d\u540c\u7684\u65b9\u6cd5\uff1f<\/li><li>\u5b83\u662f\u5982\u4f55\u5728 Python \u4e2d\u5b9e\u73b0\u7684\uff1f<\/li><li>\u673a\u5668\u5b66\u4e60\u7684\u672a\u6765<\/li><\/ul>\n\n\n\n<p class=\"has-light-pink-background-color has-background\">case\uff1a\u4ee5\u76f8\u4f3c\u5ea6\u51fd\u6570\u6765\u8fdb\u884c\u56fe\u7247\u5206\u7c7b\uff1a<\/p>\n\n\n\n<p>\u8bad\u7ec3\uff1a\u53ef\u4ee5\u5728\u5927\u89c4\u6a21\u6570\u636e\u96c6\u4e2d\u5b66\u4e60\u4e0d\u540c\u7c7b\u522b\u7684\u76f8\u4f3c\u6027\uff0c\u4f7f\u5f97\u540c\u4e00\u7c7b\u7684\u76f8\u4f3c\u5ea6\u9ad8\uff0c\u4e0d\u540c\u7c7b\u522b\u76f8\u4f3c\u5ea6\u4f4e\u3002<\/p>\n\n\n\n<p>\u6d4b\u8bd5\uff1a\u8f93\u5165query\uff08\u6d4b\u8bd5\u56fe\u7247\uff09\u548c support set\uff08\u5e26\u6807\u7b7e\u7684\u56fe\u7247\uff0c\u8981\u8fdb\u884c\u6bd4\u8f83\u7684\u4e0d\u540c\u7c7b\u522b\u7684\u6570\u636e\u96c6\u4e0d\u7b49\u4e8e\u8bad\u7ec3\u96c6\uff09\u76ee\u7684\u5c31\u662f\u8981\u8ba9\u6a21\u578b\u8bc6\u522bquery\u548c support set \u4e2d\u54ea\u4e2a\u66f4\u76f8\u4f3c\u3002<\/p>\n\n\n\n<figure class=\"wp-block-image size-large\"><img loading=\"lazy\" width=\"1024\" height=\"256\" 
src=\"http:\/\/139.9.1.231\/wp-content\/uploads\/2022\/08\/image-71-1024x256.png\" alt=\"\" class=\"wp-image-5452\" srcset=\"http:\/\/139.9.1.231\/wp-content\/uploads\/2022\/08\/image-71-1024x256.png 1024w, http:\/\/139.9.1.231\/wp-content\/uploads\/2022\/08\/image-71-300x75.png 300w, http:\/\/139.9.1.231\/wp-content\/uploads\/2022\/08\/image-71-768x192.png 768w, http:\/\/139.9.1.231\/wp-content\/uploads\/2022\/08\/image-71-1536x384.png 1536w, http:\/\/139.9.1.231\/wp-content\/uploads\/2022\/08\/image-71.png 1716w\" sizes=\"(max-width: 1024px) 100vw, 1024px\" \/><\/figure>\n\n\n\n<h2 class=\"has-light-pink-background-color has-background\" id=\"1.%20%E4%BB%80%E4%B9%88%E6%98%AF%E5%B0%8F%E6%A0%B7%E6%9C%AC%E5%AD%A6%E4%B9%A0%EF%BC%9F\">1. \u4ec0\u4e48\u662f\u5c0f\u6837\u672c\u5b66\u4e60\uff1f<\/h2>\n\n\n\n<p>&nbsp; &nbsp; &nbsp; &nbsp; \u5c0f\u6837\u672c\u5b66\u4e60(Few-shot learning, FSL)\uff0c\u5728\u5c11\u6570\u8d44\u6599\u4e2d\u4e5f\u88ab\u79f0\u4e3alow-shot learning(LSL)\u3002\u5c0f\u6837\u672c\u5b66\u4e60\u662f\u4e00\u79cd\u8bad\u7ec3\u6570\u636e\u96c6\u5305\u542b\u6709\u9650\u4fe1\u606f\u7684\u673a\u5668\u5b66\u4e60\u95ee\u9898\u3002<\/p>\n\n\n\n<p>&nbsp; &nbsp; &nbsp; &nbsp; 
\u5bf9\u4e8e\u673a\u5668\u5b66\u4e60\u5e94\u7528\u6765\u8bf4\uff0c\u901a\u5e38\u7684\u505a\u6cd5\u662f\u63d0\u4f9b\u5c3d\u53ef\u80fd\u591a\u7684\u6570\u636e\u3002\u8fd9\u662f\u56e0\u4e3a\u5728\u5927\u591a\u6570\u673a\u5668\u5b66\u4e60\u5e94\u7528\u4e2d\uff0c\u8f93\u5165\u66f4\u591a\u7684\u6570\u636e\u8bad\u7ec3\u80fd\u4f7f\u6a21\u578b\u7684\u9884\u6d4b\u6548\u679c\u66f4\u597d\u3002\u7136\u800c\uff0c\u5c0f\u6837\u672c\u5b66\u4e60\u7684\u76ee\u6807\u662f\u4f7f\u7528\u6570\u91cf\u8f83\u5c11\u7684\u8bad\u7ec3\u96c6\u6765\u6784\u5efa\u51c6\u786e\u7684\u673a\u5668\u5b66\u4e60\u6a21\u578b\u3002\u7531\u4e8e\u8f93\u5165\u6570\u636e\u7684\u7ef4\u5ea6\u662f\u4e00\u4e2a\u51b3\u5b9a\u8d44\u6e90\u6d88\u8017\u6210\u672c(\u5982\uff0c\u65f6\u95f4\u6210\u672c\uff0c\u8ba1\u7b97\u6210\u672c\u7b49)\u7684\u56e0\u7d20\uff0c\u6211\u4eec\u53ef\u4ee5\u901a\u8fc7\u4f7f\u7528\u5c0f\u6837\u672c\u5b66\u4e60\u6765\u964d\u4f4e\u6570\u636e\u5206\u6790\/\u673a\u5668\u5b66\u4e60\u6d88\u8017\u6210\u672c\u3002<\/p>\n\n\n\n<h2 class=\"has-light-pink-background-color has-background\" id=\"2.%20%E5%B0%8F%E6%A0%B7%E6%9C%AC%E5%AD%A6%E4%B9%A0%E4%B8%BA%E4%BB%80%E4%B9%88%E9%87%8D%E8%A6%81%C2%A0%EF%BC%9F\">2. 
\u5c0f\u6837\u672c\u5b66\u4e60\u4e3a\u4ec0\u4e48\u91cd\u8981&nbsp;\uff1f<\/h2>\n\n\n\n<ul><li><strong>\u7c7b\u4f3c\u4eba\u7684\u5b66\u4e60\u65b9\u5f0f<\/strong>\uff1a\u4eba\u5728\u770b\u8fc7\u5c11\u91cf\u4f8b\u5b50\u540e\u5c31\u53ef\u4ee5\u8ba4\u51fa\u624b\u5199\u5b57\u7b26\u4e4b\u95f4\u7684\u4e0d\u540c\u3002\u7136\u800c\uff0c\u8ba1\u7b97\u673a\u9700\u8981\u5927\u91cf\u7684\u6570\u636e\u53bb\u201c\u5206\u7c7b\u201d\u5b83\u770b\u5230\u7684\u4e1c\u897f\uff0c\u5e76\u8bc6\u522b\u51fa\u624b\u5199\u5b57\u7b26\u4e4b\u95f4\u7684\u4e0d\u540c\u3002\u5c0f\u6837\u672c\u5b66\u4e60\u662f\u4e00\u79cdtest base\u7684\u65b9\u6cd5\uff0c\u6211\u4eec\u671f\u671b\u5b83\u80fd\u50cf\u4eba\u4e00\u6837\u4ece\u5c11\u91cf\u7684\u6837\u672c\u4e2d\u5b66\u4e60\u3002<\/li><li><strong>\u7a00\u6709\u6837\u672c\u5b66\u4e60<\/strong>\uff1a\u5c0f\u6837\u672c\u5b66\u4e60\u80fd\u7528\u4e8e\u7a00\u6709\u6837\u672c\u7684\u5b66\u4e60\u3002\u4f8b\u5982\uff0c\u5f53\u5bf9\u52a8\u7269\u56fe\u7247\u8fdb\u884c\u5206\u7c7b\u65f6\uff0c\u7528\u5c0f\u6837\u672c\u5b66\u4e60\u8bad\u7ec3\u7684\u673a\u5668\u5b66\u4e60\u6a21\u578b\uff0c\u5728\u53ea\u5f97\u5230\u5c11\u91cf\u7684\u5148\u9a8c\u4fe1\u606f\u540e\uff0c\u53ef\u4ee5\u6b63\u786e\u5730\u5bf9\u7a00\u6709\u6837\u672c\u7684\u56fe\u50cf\u8fdb\u884c\u5206\u7c7b\u3002<\/li><li>\u964d\u4f4e\u6570\u636e\u6536\u96c6\u548c\u8ba1\u7b97\u6210\u672c\uff1a\u7531\u4e8e\u5c0f\u6837\u672c\u5b66\u4e60\u4ec5\u9700\u8981\u5c11\u91cf\u7684\u6570\u636e\u6765\u8bad\u7ec3\u6a21\u578b\uff0c\u6d88\u9664\u4e86\u6570\u636e\u6536\u96c6\u548c\u6807\u8bb0\u76f8\u5173\u7684\u9ad8\u6210\u672c\u3002\u8bad\u7ec3\u6570\u636e\u91cf\u5c11\u610f\u5473\u7740\u8bad\u7ec3\u6570\u636e\u96c6\u7684\u7ef4\u6570\u4f4e\uff0c\u8fd9\u53ef\u4ee5\u663e\u7740\u964d\u4f4e\u8ba1\u7b97\u6210\u672c\u3002<\/li><\/ul>\n\n\n\n<h2 class=\"has-light-pink-background-color has-background\" 
id=\"3.%20%E5%B0%8F%E6%A0%B7%E6%9C%AC%E5%AD%A6%E4%B9%A0(Few-shot%20Learning)%E5%92%8C%E9%9B%B6%E6%A0%B7%E6%9C%AC%E5%AD%A6%E4%B9%A0(Zero-shot%20Learning)%E7%9A%84%E5%8C%BA%E5%88%AB%C2%A0\">3. \u5c0f\u6837\u672c\u5b66\u4e60(Few-shot Learning)\u548c\u96f6\u6837\u672c\u5b66\u4e60(Zero-shot Learning)\u7684\u533a\u522b&nbsp;<\/h2>\n\n\n\n<p>&nbsp;      \u5c0f\u6837\u672c\u5b66\u4e60\u7684\u76ee\u7684\u662f\u5728\u6709\u5c11\u91cf\u8bad\u7ec3\u6570\u636e\u7684\u60c5\u51b5\u4e0b\u80fd\u83b7\u5f97\u51c6\u786e\u5206\u7c7b\u6d4b\u8bd5\u6837\u672c\u7684\u6a21\u578b\u3002\u96f6\u6837\u672c\u5b66\u4e60\u7684\u76ee\u7684\u662f\u9884\u6d4b\u8bad\u7ec3\u6570\u636e\u96c6\u4e2d\u6ca1\u6709\u51fa\u73b0\u8fc7\u7684\u7c7b\u522b\u3002\u96f6\u6837\u672c\u5b66\u4e60\u548c\u5c0f\u6837\u672c\u5b66\u4e60\u6709\u5f88\u591a\u5171\u540c\u7684\u5e94\u7528\uff0c\u4f8b\u5982\uff1a<\/p>\n\n\n\n<ul><li>\u56fe\u50cf\u5206\u7c7b(image classification)<\/li><li>\u8bed\u4e49\u5206\u5272(semantic segmentation)<\/li><li>\u56fe\u50cf\u751f\u6210(image generation)<\/li><li>\u76ee\u6807\u68c0\u6d4b(object detection)<\/li><li>\u81ea\u7136\u8bed\u8a00\u5904\u7406(natural language processing)<\/li><\/ul>\n\n\n\n<p>        \u8fd8\u6709\u4e00\u79cd\u53eb\u5355\u6837\u672c\u5b66\u4e60(one-shot learning)\u7684\uff0c\u5b83\u7ecf\u5e38\u4f1a\u548c\u96f6\u6837\u672c\u5b66\u4e60\u6df7\u5728\u4e00\u8d77\u3002\u5355\u6837\u672c\u5b66\u4e60\u662f\u5c0f\u6837\u672c\u5b66\u4e60\u95ee\u9898\u7684\u4e00\u4e2a\u7279\u4f8b\uff0c\u5b83\u7684\u76ee\u7684\u662f\u4ece\u4e00\u4e2a\u8bad\u7ec3\u6837\u672c\u6216\u56fe\u7247\u4e2d\u5b66\u4e60\u5230\u6709\u5173\u7269\u4f53\u7c7b\u522b\u7684\u4fe1\u606f\u3002<strong>\u5355\u6837\u672c\u5b66\u4e60\u7684\u4e00\u4e2a\u4f8b\u5b50\u662f\uff0c\u667a\u80fd\u624b\u673a\u4e2d\u4f7f\u7528\u7684\u4eba\u8138\u8bc6\u522b\u6280\u672f\u3002<\/strong><\/p>\n\n\n\n<h2 class=\"has-light-pink-background-color has-background\" 
id=\"4.%20%E5%B0%8F%E6%A0%B7%E6%9C%AC%E5%AD%A6%E4%B9%A0%E7%9A%84%E6%96%B9%E6%B3%95\"><a><\/a>4. \u5c0f\u6837\u672c\u5b66\u4e60\u7684\u65b9\u6cd5<\/h2>\n\n\n\n<ul class=\"has-dark-gray-color has-light-gray-background-color has-text-color has-background\"><li><strong>\u5173\u4e8e\u76f8\u4f3c\u6027\u7684\u5148\u9a8c\u77e5\u8bc6<\/strong>\uff1a\u673a\u5668\u5b66\u4e60\u6a21\u578b\u5728\u8bad\u7ec3\u6570\u636e\uff08\u53ef\u4ee5\u662f\u5728\u5927\u89c4\u6a21\u6570\u636e\u96c6\uff09\u4e2d\u5b66\u4e60\u6a21\u5f0f(patterns)\uff0c\u8fd9\u4e9b\u6a21\u5f0f\u503e\u5411\u4e8e\u5206\u79bb\u4e0d\u540c\u7684\u7c7b\uff0c\u5373\u4f7f\u662f\u6ca1\u6709\u89c1\u8fc7\u7684\u6570\u636e\u3002\u4f20\u7edf\u673a\u5668\u5b66\u4e60\u6a21\u578b\u4e0d\u80fd\u5206\u51fa\u6ca1\u6709\u5728\u8bad\u7ec3\u6570\u636e\u96c6\u4e2d\u51fa\u73b0\u8fc7\u7684\u7c7b\u3002\u7136\u800c\uff0c\u5bf9\u4e8e\u5c0f\u6837\u672c\u5b66\u4e60\u6280\u672f\uff0c\u5b83\u80fd\u4f7f\u673a\u5668\u5b66\u4e60\u6a21\u578b\u5bf9\u6ca1\u6709\u5728\u8bad\u7ec3\u96c6\u4e2d\u51fa\u73b0\u7684\u7c7b\u522b\u8fdb\u884c\u5206\u7c7b\u3002<ul><li><strong>\u4e8c\u5206\u7c7b\u5224\u522b<\/strong><\/li><\/ul><ol><li><a rel=\"noreferrer noopener\" href=\"https:\/\/www.cs.utoronto.ca\/~gkoch\/files\/msc-thesis.pdf\" target=\"_blank\">\u5b6a\u751f\u7f51\u7edc(Siamese Networks)<\/a>\u3002                                                                                       Python\u5b9e\u73b0\u4ee3\u7801\uff1a<a rel=\"noreferrer noopener\" href=\"https:\/\/github.com\/tensorfreitas\/Siamese-Networks-for-One-Shot-Learning\" target=\"_blank\">https:\/\/github.com\/tensorfreitas\/Siamese-Networks-for-One-Shot-Learning<\/a><\/li><\/ol><ol><li><a rel=\"noreferrer noopener\" href=\"https:\/\/arxiv.org\/pdf\/1412.6622.pdf\" target=\"_blank\">\u4e09\u80de\u80ce\u7f51\u7edc(Triplet Networks)<\/a>\u3002                                                                                                Python\u5b9e\u73b0\u4ee3\u7801\uff1a<a rel=\"noreferrer 
noopener\" href=\"https:\/\/github.com\/asparagus\/triplet-net\" target=\"_blank\">https:\/\/github.com\/asparagus\/triplet-net<\/a><\/li><\/ol><ul><li><strong>\u591a\u5206\u7c7b\u5224\u522b<\/strong><\/li><\/ul><ol><li><a rel=\"noreferrer noopener\" href=\"https:\/\/arxiv.org\/pdf\/1606.04080.pdf\" target=\"_blank\">\u5339\u914d\u7f51\u7edc(Matching Networks)<\/a>\u3002                                                                                                Python\u5b9e\u73b0\u4ee3\u7801\uff1a<a rel=\"noreferrer noopener\" href=\"https:\/\/github.com\/schatty\/matching-networks-tf\" target=\"_blank\">https:\/\/github.com\/schatty\/matching-networks-tf<\/a><\/li><li><a rel=\"noreferrer noopener\" href=\"https:\/\/arxiv.org\/pdf\/1703.05175.pdf\" target=\"_blank\">\u539f\u5f62\u7f51\u7edc(Prototypical Network)<\/a>\u3002                                                                                          Python\u5b9e\u73b0\u4ee3\u7801\uff1a<a rel=\"noreferrer noopener\" href=\"https:\/\/github.com\/jakesnell\/prototypical-networks\" target=\"_blank\">https:\/\/github.com\/jakesnell\/prototypical-networks<\/a><\/li><li><a rel=\"noreferrer noopener\" href=\"https:\/\/arxiv.org\/pdf\/1706.01427.pdf\" target=\"_blank\">\u5173\u7cfb\u7f51\u7edc(Relation Networks)<\/a>\u3002                                                                                              Python\u5b9e\u73b0\u4ee3\u7801\uff1a<a rel=\"noreferrer noopener\" href=\"https:\/\/github.com\/mesnico\/RelationNetworks-CLEVR\" 
target=\"_blank\">https:\/\/github.com\/mesnico\/RelationNetworks-CLEVR<\/a><\/li><\/ol><\/li><li><strong>\u5173\u4e8e\u5b66\u4e60\u7684\u5148\u9a8c\u77e5\u8bc6<\/strong>\uff1a\u673a\u5668\u5b66\u4e60\u6a21\u578b\u5229\u7528\u5148\u9a8c\u77e5\u8bc6\u7ea6\u675f\u5b66\u4e60\u7b97\u6cd5\uff0c\u4ece\u5c11\u91cf\u7684\u6837\u672c\u4e2d\u9009\u62e9\u6cdb\u5316\u826f\u597d\u7684\u53c2\u6570\u3002<ul><li>\u5728\u5c0f\u6837\u672c\u5b66\u4e60\u4e2d\u7528\u4e8e\u8d85\u53c2\u6570\u8c03\u6574\u7684\u6280\u672f\u6709\uff1a<\/li><\/ul><ol><li><a rel=\"noreferrer noopener\" href=\"https:\/\/arxiv.org\/pdf\/1710.11622.pdf\" target=\"_blank\">MAML(Model-agnostic Meta-learning)<\/a>\u3002                                                                              Python\u5b9e\u73b0\u4ee3\u7801\uff1a<a rel=\"noreferrer noopener\" href=\"https:\/\/github.com\/cbfinn\/maml\" target=\"_blank\">https:\/\/github.com\/cbfinn\/maml<\/a><\/li><li><a rel=\"noreferrer noopener\" href=\"https:\/\/arxiv.org\/pdf\/1703.03400.pdf\" target=\"_blank\">FOMAML(First-order Model-agnostic Meta-learning)<\/a>\u3002                                                    Python\u5b9e\u73b0\u4ee3\u7801\uff1a<a rel=\"noreferrer noopener\" href=\"https:\/\/github.com\/samringer\/FOMAML\/blob\/master\/SineWave.ipynb\" target=\"_blank\">https:\/\/github.com\/samringer\/FOMAML\/blob\/master\/SineWave.ipynb<\/a><\/li><li><a rel=\"noreferrer noopener\" href=\"https:\/\/arxiv.org\/pdf\/1803.02999.pdf\" target=\"_blank\">Reptile<\/a>\u3002                                                                                                                                         Python\u5b9e\u73b0\u4ee3\u7801\uff1a<a rel=\"noreferrer noopener\" href=\"https:\/\/github.com\/openai\/supervised-reptile\" 
target=\"_blank\">https:\/\/github.com\/openai\/supervised-reptile<\/a><\/li><\/ol><ul><li>\u5b66\u4e60\u66f4\u65b0\u89c4\u5219\u4e5f\u53ef\u4ee5\u4f7f\u5c0f\u6570\u636e\u96c6\u83b7\u5f97\u826f\u597d\u6027\u80fd\uff1a<\/li><\/ul><ol><li><a rel=\"noreferrer noopener\" href=\"https:\/\/openreview.net\/pdf?id=rJY0-Kcll\" target=\"_blank\">LSTMs<\/a>\u3002      Python\u5b9e\u73b0\u4ee3\u7801\uff1a<a rel=\"noreferrer noopener\" href=\"https:\/\/github.com\/twitter\/meta-learning-lstm\" target=\"_blank\">https:\/\/github.com\/twitter\/meta-learning-lstm<\/a><\/li><li><a rel=\"noreferrer noopener\" href=\"https:\/\/arxiv.org\/pdf\/1606.01885.pdf\" target=\"_blank\">\u5f3a\u5316\u5b66\u4e60(Reinforcement learning)<\/a>\u3002                                                                                      Python\u5b9e\u73b0\u4ee3\u7801\uff1a<a rel=\"noreferrer noopener\" href=\"https:\/\/github.com\/dennybritz\/reinforcement-learning\" target=\"_blank\">https:\/\/github.com\/dennybritz\/reinforcement-learning<\/a><\/li><li><a rel=\"noreferrer noopener\" href=\"https:\/\/arxiv.org\/pdf\/1709.07417.pdf\" target=\"_blank\">Optimization rules<\/a>\u3002                                                                                                                   Python\u5b9e\u73b0\u4ee3\u7801\uff1a<a rel=\"noreferrer noopener\" href=\"https:\/\/github.com\/mogolola\/neural_optimizer_search\" target=\"_blank\">https:\/\/github.com\/mogolola\/neural_optimizer_search<\/a><\/li><\/ol><ul><li>\u5e8f\u5217\u65b9\u6cd5\u4f7f\u7528\u6574\u4e2a\u6570\u636e\u96c6\u548c\u6d4b\u8bd5\u793a\u4f8b\uff0c\u5e76\u9884\u6d4b\u6d4b\u8bd5\u6837\u672c\u6807\u7b7e\u7684\u503c\uff1a<\/li><\/ul><ol><li><a rel=\"noreferrer noopener\" href=\"https:\/\/arxiv.org\/pdf\/1605.06065.pdf\" target=\"_blank\">Memory-augmented NN<\/a>\u3002                                                                                                               Python\u5b9e\u73b0\u4ee3\u7801\uff1a<a rel=\"noreferrer 
noopener\" href=\"https:\/\/github.com\/hmishra2250\/NTM-One-Shot-TF\" target=\"_blank\">https:\/\/github.com\/hmishra2250\/NTM-One-Shot-TF<\/a><\/li><li><a rel=\"noreferrer noopener\" href=\"https:\/\/arxiv.org\/pdf\/1707.03141.pdf\" target=\"_blank\">SNAIL<\/a>\u3002                                                                                                                                       Python\u5b9e\u73b0\u4ee3\u7801\uff1a<a rel=\"noreferrer noopener\" href=\"https:\/\/github.com\/sagelywizard\/snail\" target=\"_blank\">https:\/\/github.com\/sagelywizard\/snail<\/a><\/li><\/ol><\/li><li>\u5173\u4e8e\u6570\u636e\u7684\u5148\u9a8c\u77e5\u8bc6\uff1a\u673a\u5668\u5b66\u4e60\u6a21\u578b\u5229\u7528\u4e86\u5173\u4e8e\u6570\u636e\u7684\u7ed3\u6784\u548c\u53ef\u53d8\u6027\u7684\u5148\u9a8c\u77e5\u8bc6\uff0c\u8fd9\u4f7f\u5f97\u53ef\u4ee5\u4ece\u5f88\u5c11\u7684\u4f8b\u5b50\u4e2d\u6784\u5efa\u53ef\u884c\u7684\u6a21\u578b\u3002<ul><li>\u751f\u6210\u6a21\u578b\uff1a<\/li><\/ul><ol><li><a rel=\"noreferrer noopener\" href=\"https:\/\/web.mit.edu\/cocosci\/Papers\/Science-2015-Lake-1332-8.pdf\" target=\"_blank\">Pen-stroke models<\/a>\u3002Python\u5b9e\u73b0\u4ee3\u7801\uff1a<a rel=\"noreferrer noopener\" href=\"https:\/\/github.com\/brendenlake\/omniglot\" target=\"_blank\">https:\/\/github.com\/brendenlake\/omniglot<\/a><\/li><li><a rel=\"noreferrer noopener\" href=\"https:\/\/arxiv.org\/pdf\/1606.02185.pdf\" target=\"_blank\">Neural statistician<\/a>\u3002Python\u5b9e\u73b0\u4ee3\u7801\uff1a<a rel=\"noreferrer noopener\" href=\"https:\/\/github.com\/conormdurkan\/neural-statistician\" target=\"_blank\">https:\/\/github.com\/conormdurkan\/neural-statistician<\/a><\/li><\/ol><ul><li>\u5408\u6210\u65b0\u7684\u8bad\u7ec3\u96c6\u6837\u672c:<\/li><\/ul><ol><li>Analogies (Facebook AI Research)\u3002Python\u5b9e\u73b0\u4ee3\u7801\uff1a<a rel=\"noreferrer noopener\" href=\"https:\/\/github.com\/facebookresearch\/low-shot-shrink-hallucinate\" 
target=\"_blank\">https:\/\/github.com\/facebookresearch\/low-shot-shrink-hallucinate<\/a><\/li><li><a rel=\"noreferrer noopener\" href=\"https:\/\/arxiv.org\/pdf\/1608.00859.pdf\" target=\"_blank\">End-to-end<\/a>\u3002Python\u5b9e\u73b0\u4ee3\u7801\uff1a<a rel=\"noreferrer noopener\" href=\"https:\/\/github.com\/yjxiong\/temporal-segment-networks\" target=\"_blank\">https:\/\/github.com\/yjxiong\/temporal-segment-networks<\/a><\/li><\/ol><\/li><\/ul>\n\n\n\n<h2 class=\"has-light-pink-background-color has-background\" id=\"5.%20%E5%B0%8F%E6%A0%B7%E6%9C%AC%E5%AD%A6%E4%B9%A0%E7%9A%84%E5%BA%94%E7%94%A8\">5. \u5c0f\u6837\u672c\u5b66\u4e60\u7684\u5e94\u7528<\/h2>\n\n\n\n<p><strong>5.1 \u8ba1\u7b97\u673a\u89c6\u89c9<\/strong>\uff1a\u8ba1\u7b97\u673a\u89c6\u89c9\u63a2\u7d22\u5982\u4f55\u4ece\u6570\u5b57\u56fe\u50cf\u6216\u89c6\u9891\u4e2d\u83b7\u5f97\u9ad8\u7ea7\u7406\u89e3\u3002\u5c0f\u6837\u672c\u5b66\u4e60\u5728\u8ba1\u7b97\u673a\u89c6\u89c9\u4e2d\u4e3b\u8981\u7528\u4e8e\u5904\u7406\u4ee5\u4e0b\u95ee\u9898\uff1a<\/p>\n\n\n\n<ul><li>\u5b57\u7b26\u8bc6\u522b-Character recognition\u3002NIPS 2016\uff1a<a href=\"https:\/\/arxiv.org\/abs\/1606.05233\" target=\"_blank\" rel=\"noreferrer noopener\">Learning feed-forward one-shot learners<\/a><\/li><li>\u56fe\u50cf\u5206\u7c7b-Image classification\u3002ICML 2017\uff1a<a href=\"https:\/\/arxiv.org\/abs\/1703.03400\" target=\"_blank\" rel=\"noreferrer noopener\">Model-Agnostic Meta-Learning for Fast Adaptation of Deep Networks<\/a><\/li><li>\u7269\u4f53\u8bc6\u522b-Object recognition\u3002<a href=\"https:\/\/dl.acm.org\/doi\/10.5555\/2976040.2976097\" target=\"_blank\" rel=\"noreferrer noopener\">NIPS*<\/a><\/li><li>\u5176\u4ed6\u56fe\u50cf\u5e94\u7528\uff1a<ul><li>\u56fe\u50cf\u68c0\u7d22-image retrieval\u3002<a href=\"https:\/\/arxiv.org\/abs\/1707.02610\" target=\"_blank\" rel=\"noreferrer noopener\">NIPS**<\/a><\/li><li>\u76ee\u6807\u8ddf\u8e2a-object tracking\u3002<a 
href=\"http:\/\/papers.nips.cc\/paper\/6067-learning-feed-forward-one-shot-learners\" target=\"_blank\" rel=\"noreferrer noopener\">NIPS***<\/a><\/li><li>\u56fe\u50cf\u4e2d\u7279\u5b9a\u7269\u4f53\u8ba1\u6570-specific object counting in images\u3002<a href=\"https:\/\/openaccess.thecvf.com\/content_ECCV_2018\/papers\/Fang_Zhao_Dynamic_Conditional_Networks_ECCV_2018_paper.pdf\" target=\"_blank\" rel=\"noreferrer noopener\">ECCV<\/a><\/li><li>\u573a\u666f\u4f4d\u7f6e\u8bc6\u522b-scene location recognition\u3002<a href=\"https:\/\/openaccess.thecvf.com\/content_cvpr_2016\/papers\/Kwitt_One-Shot_Learning_of_CVPR_2016_paper.pdf\" target=\"_blank\" rel=\"noreferrer noopener\">IEEE<\/a><\/li><li>\u624b\u52bf\u8bc6\u522b-gesture recognition\u3002<a href=\"https:\/\/www.robots.ox.ac.uk\/~vgg\/publications\/2014\/Pfister14\/pfister14.pdf\" target=\"_blank\" rel=\"noreferrer noopener\">Oxford<\/a><\/li><li>part labeling\u3002<a href=\"https:\/\/openaccess.thecvf.com\/content_cvpr_2018\/papers\/Choi_Structured_Set_Matching_CVPR_2018_paper.pdf\" target=\"_blank\" rel=\"noreferrer noopener\">IEEE*<\/a><\/li><li>\u56fe\u50cf\u751f\u6210-image generation\u3002<a href=\"http:\/\/papers.nips.cc\/paper\/6527-conditional-image-generation-with-pixelcnn-decoders\" target=\"_blank\" rel=\"noreferrer noopener\">NIPS****<\/a><\/li><li>\u4e09\u7ef4\u7269\u4f53\u7684\u5f62\u72b6\u89c6\u56fe\u91cd\u5efa-shape view reconstruction for 3D objects\u3002<a href=\"https:\/\/openreview.net\/pdf\/e9d0c9677cdf67034a920a123c97bfbea6e64d5b.pdf\" target=\"_blank\" rel=\"noreferrer noopener\">ICLR<\/a><\/li><li>\u56fe\u50cf\u63cf\u8ff0-image captioning\u3002<a href=\"https:\/\/xuanyidong.com\/resources\/papers\/ACM-MM-18-FPAIT.pdf\" target=\"_blank\" rel=\"noreferrer noopener\">Association for Computing Machinery<\/a><\/li><\/ul><\/li><li>\u89c6\u9891\u5e94\u7528\uff1a<ul><li>\u89c6\u9891\u5206\u7c7b-video classification\u3002<a 
href=\"https:\/\/openaccess.thecvf.com\/content_ECCV_2018\/papers\/Linchao_Zhu_Compound_Memory_Networks_ECCV_2018_paper.pdf\" target=\"_blank\" rel=\"noreferrer noopener\">ECCV*<\/a><\/li><li>\u52a8\u4f5c\u9884\u6d4b-motion prediction\u3002<a href=\"https:\/\/openaccess.thecvf.com\/content_ECCV_2018\/papers\/Liangyan_Gui_Few-Shot_Human_Motion_ECCV_2018_paper.pdf\" target=\"_blank\" rel=\"noreferrer noopener\">ECCV**<\/a><\/li><li>\u884c\u4e3a\u68c0\u6d4b-action localization\u3002<a href=\"https:\/\/ieeexplore.ieee.org\/document\/8578255\" target=\"_blank\" rel=\"noreferrer noopener\">IEEE**<\/a><\/li><li>\u884c\u4eba\u518d\u8bc6\u522b-person re-identification\u3002<a href=\"https:\/\/ieeexplore.ieee.org\/document\/8578641\" target=\"_blank\" rel=\"noreferrer noopener\">IEEE***<\/a><\/li><li>\u4e8b\u4ef6\u68c0\u6d4b-event detection\u3002<a href=\"https:\/\/www.cs.sfu.ca\/~mori\/research\/papers\/yan-bmvc15.pdf\" target=\"_blank\" rel=\"noreferrer noopener\">British Machine Vision Conference<\/a><\/li><\/ul>\n\n\n\n<p><strong>5.2 \u81ea\u7136\u8bed\u8a00\u5904\u7406<\/strong>\uff1a\u5c0f\u6837\u672c\u5b66\u4e60\u4f7f\u81ea\u7136\u8bed\u8a00\u5904\u7406\u5e94\u7528\u7a0b\u5e8f\u80fd\u591f\u7528\u5f88\u5c11\u7684\u6587\u672c\u6570\u636e\u6837\u672c\u6765\u5b8c\u6210\u4efb\u52a1\u3002\u4f8b\u5982\uff1a<\/p>\n\n\n\n<ul><li>\u53e5\u6cd5\u5206\u6790-parsing\u3002<a rel=\"noreferrer noopener\" href=\"https:\/\/arxiv.org\/abs\/1805.06556\" target=\"_blank\">Association for Computational Linguistics<\/a><\/li><li>\u7ffb\u8bd1-translation\u3002<a rel=\"noreferrer noopener\" href=\"https:\/\/arxiv.org\/abs\/1703.03129\" target=\"_blank\">ICLR<\/a><\/li><li>\u53e5\u5b50\u586b\u7a7a-sentence completion\u3002<a rel=\"noreferrer noopener\" href=\"https:\/\/arxiv.org\/pdf\/1606.04080.pdf\" target=\"_blank\">Google<\/a><\/li><li>\u77ed\u6587\u672c\u7684\u60c5\u611f\u5206\u7c7b-sentiment classification from short reviews\u3002<a rel=\"noreferrer noopener\" 
href=\"https:\/\/arxiv.org\/abs\/1805.07513\" target=\"_blank\">NAACL<\/a><\/li><li>\u5bf9\u8bdd\u7cfb\u7edf\u7684\u7528\u6237\u610f\u56fe\u5206\u7c7b-user intent classification for dialog systems\u3002<a rel=\"noreferrer noopener\" href=\"https:\/\/arxiv.org\/pdf\/1805.07513.pdf\" target=\"_blank\">IBM Research<\/a><\/li><li>\u5211\u4e8b\u6307\u63a7\u9884\u6d4b-criminal charge prediction\u3002\u4ee3\u7801\uff1a<a rel=\"noreferrer noopener\" href=\"https:\/\/github.com\/thunlp\/attribute_charge\" target=\"_blank\">https:\/\/github.com\/thunlp\/attribute_charge<\/a>\u3002<a rel=\"noreferrer noopener\" href=\"https:\/\/www.aclweb.org\/anthology\/C18-1041\/\" target=\"_blank\">Association for Computer Linguistics<\/a><\/li><li>\u5355\u8bcd\u76f8\u4f3c\u5ea6\u4efb\u52a1-word similarity tasks\u3002<a rel=\"noreferrer noopener\" href=\"https:\/\/www.aclweb.org\/anthology\/D17-1030.pdf\" target=\"_blank\">Association for Computer Linguistics*<\/a><\/li><li>\u591a\u6807\u7b7e\u6587\u672c\u5206\u7c7b-multi-label text classification\u3002<a rel=\"noreferrer noopener\" href=\"https:\/\/www.aclweb.org\/anthology\/D18-1352\/\" target=\"_blank\">Association for Computer Linguistics**<\/a><\/li><\/ul>\n\n\n\n<p><strong>5.3&nbsp;\u673a\u5668\u4eba<\/strong>\uff1a\u4e3a\u4e86\u8ba9\u673a\u5668\u4eba\u7684\u884c\u4e3a\u66f4\u50cf\u4eba\u7c7b\uff0c\u5b83\u4eec\u5e94\u8be5\u80fd\u591f\u4ece\u5c11\u91cf\u7684\u793a\u4f8b\u4e2d\u5f52\u7eb3\u51fa\u4fe1\u606f\u3002\u56e0\u6b64\uff0c\u5c0f\u6837\u672c\u5b66\u4e60\u5728\u8bad\u7ec3\u673a\u5668\u4eba\u5b8c\u6210\u7279\u5b9a\u4efb\u52a1\u4e2d\u626e\u6f14\u4e86\u4e00\u4e2a\u5173\u952e\u89d2\u8272\uff0c\u4f8b\u5982\uff1a<\/p>\n\n\n\n<ul><li>\u901a\u8fc7\u6a21\u4eff\u4e00\u4e2a\u52a8\u4f5c\u6765\u5b66\u4e60\u8be5\u52a8\u4f5c-learning a movement by imitating a single demonstration\u3002<a href=\"https:\/\/ieeexplore.ieee.org\/document\/5509429\" target=\"_blank\" rel=\"noreferrer 
noopener\">IEEE****<\/a><\/li><li>\u4ece\u5c11\u91cf\u793a\u4f8b\u4e2d\u5b66\u4e60\u64cd\u4f5c\u52a8\u4f5c-learning manipulation actions from a few demonstrations\u3002<a href=\"https:\/\/www.semanticscholar.org\/paper\/Learning-manipulation-actions-from-a-few-Abdo-Kretzschmar\/cb5a0ef9fcafdf160fcfbb62387b246b786c82cd\" target=\"_blank\" rel=\"noreferrer noopener\">IEEE*****<\/a><\/li><li>\u89c6\u89c9\u5bfc\u822a-visual navigation\u3002<a href=\"http:\/\/proceedings.mlr.press\/v70\/finn17a.html\" target=\"_blank\" rel=\"noreferrer noopener\">PMLR<\/a><\/li><li>\u8fde\u7eed\u63a7\u5236-continuous control\u3002<a href=\"https:\/\/dl.acm.org\/doi\/10.5555\/3327757.3327835\" target=\"_blank\" rel=\"noreferrer noopener\">NIPS*****<\/a><\/li><\/ul>\n\n\n\n<p><strong>5.4&nbsp;\u58f0\u4fe1\u53f7\u5904\u7406<\/strong>\uff1a\u5305\u542b\u6709\u5173\u58f0\u97f3\u4fe1\u606f\u7684\u6570\u636e\u53ef\u4ee5\u901a\u8fc7\u58f0\u4fe1\u53f7\u5904\u7406\u8fdb\u884c\u5206\u6790\uff0c\u5c0f\u6837\u672c\u5728\u8be5\u65b9\u5411\u7684\u5e94\u7528\u6709\uff1a<\/p>\n\n\n\n<ul><li>\u4ece\u7528\u6237\u5c11\u91cf\u7684\u97f3\u9891\u6837\u672c\u4e2d\u514b\u9686\u58f0\u97f3(<a href=\"https:\/\/arxiv.org\/abs\/1802.06006\" target=\"_blank\" rel=\"noreferrer noopener\">voice&nbsp;cloning<\/a>)\uff0c\u5982\u5bfc\u822aapp\u4e2d\u7684\u58f0\u97f3\u3001Siri\u7b49\u3002<\/li><li>\u53d8\u58f0-<a href=\"https:\/\/arxiv.org\/abs\/1808.05294\" target=\"_blank\" rel=\"noreferrer noopener\">voice&nbsp;conversion<\/a><\/li><li>\u4e0d\u540c\u8bed\u8a00\u4e4b\u95f4\u7684\u58f0\u97f3\u8f6c\u6362<\/li><\/ul>\n\n\n\n<p><strong>5.5&nbsp;\u5176\u5b83\u5e94\u7528<\/strong>\uff1a<\/p>\n\n\n\n<ul><li>\u533b\u5b66\u5e94\u7528(\u5982\uff0c<a href=\"https:\/\/pubs.acs.org\/doi\/10.1021\/acscentsci.6b00367\" target=\"_blank\" rel=\"noreferrer noopener\">few-shot drug discovery<\/a>)\u3002<\/li><li>\u5355\u6837\u672c\u7ed3\u6784\u641c\u7d22-<a href=\"https:\/\/arxiv.org\/abs\/1708.05344\" target=\"_blank\" rel=\"noreferrer 
noopener\">one-shot architecture search<\/a>\uff1a\u901a\u8fc7\u4e00\u6b21\u8bad\u7ec3\u8d85\u7f51(\u8d85\u7f51\u662f\u51e0\u4e2aInternet Protocol (IP)\u7f51\u7edc\u6216\u5b50\u7f51\u7684\u7ec4\u5408\uff0c\u7ec4\u6210\u4e00\u4e2a\u5177\u6709\u5355\u4e00\u65e0\u5206\u7c7b\u57df\u95f4\u8def\u7531(CIDR)\u524d\u7f00\u7684\u7f51\u7edc)\u6765\u5bfb\u627e\u4f53\u7cfb\u7ed3\u6784\u3002<\/li><li>\u6570\u5b66\u5e94\u7528<ul><li>\u66f2\u7ebf\u62df\u5408-<a href=\"https:\/\/papers.nips.cc\/paper\/8161-probabilistic-model-agnostic-meta-learning\" target=\"_blank\" rel=\"noreferrer noopener\">curve-fitting<\/a><\/li><li>\u7406\u89e3\u6570\u5b57\u7c7b\u63a8\uff0c\u901a\u8fc7\u903b\u8f91\u63a8\u7406(<a href=\"https:\/\/arxiv.org\/abs\/1902.02527\" target=\"_blank\" rel=\"noreferrer noopener\">logic reasoning<\/a>)\u6765\u6267\u884c\u8ba1\u7b97<\/li><\/ul><\/li><\/ul>\n\n\n\n<h2 class=\"has-light-pink-background-color has-background\" id=\"6.%C2%A0Python%E5%AE%9E%E7%8E%B0\">6.&nbsp;Python\u5b9e\u73b0<\/h2>\n\n\n\n<ul><li><a href=\"https:\/\/tristandeleu.github.io\/pytorch-meta\/\" target=\"_blank\" rel=\"noreferrer noopener\">Pytorch \u2013 Torchmeta<\/a>\uff1a\u4e00\u4e2a\u7528\u4e8e\u5c0f\u6837\u672c\u5206\u7c7b\u548c\u56de\u5f52\u95ee\u9898\u7684\u5e93\uff0c\u53ef\u4ee5\u4f5c\u4e3a\u591a\u4e2a\u95ee\u9898\u7684\u57fa\u7ebf\u3002<\/li><li><a href=\"https:\/\/github.com\/thunlp\/FewRel\" target=\"_blank\" rel=\"noreferrer noopener\">FewRel<\/a>\uff1a\u4e00\u4e2a\u5927\u89c4\u6a21\u7684\u5c0f\u6837\u672c\u5173\u7cfb\u63d0\u53d6\u6570\u636e\u96c6\uff0c\u5305\u542b\u4e86100+\u5173\u7cfb\u548c\u5f88\u591a\u5df2\u6807\u6ce8\u7684\u8de8\u9886\u57df\u7684\u5b9e\u4f8b\u3002<\/li><li><a href=\"https:\/\/github.com\/yaoyao-liu\/meta-transfer-learning\" target=\"_blank\" rel=\"noreferrer noopener\">Meta Transfer Learning<\/a>\uff1a\u8fd9\u4e2a\u5e93\u5305\u542b\u4e86<a 
href=\"http:\/\/openaccess.thecvf.com\/content_CVPR_2019\/papers\/Sun_Meta-Transfer_Learning_for_Few-Shot_Learning_CVPR_2019_paper.pdf\" target=\"_blank\" rel=\"noreferrer noopener\">\u57fa\u4e8e\u5143\u8fc1\u79fb\u5b66\u4e60\u7684\u5c0f\u6837\u672c\u5b66\u4e60<\/a>\u7684TensorFlow\u548cPyTorch\u5b9e\u73b0\u3002<\/li><li><a href=\"https:\/\/github.com\/oscarknagg\/few-shot\" target=\"_blank\" rel=\"noreferrer noopener\">Few&nbsp;Shot<\/a>\uff1a\u7eaf\u51c0\u3001\u6613\u8bfb\u3001\u6709\u6d4b\u8bd5\u4ee3\u7801\u7684\u5c0f\u6837\u672c\u5b66\u4e60\u7814\u7a76\u590d\u73b0\u5e93\u3002<\/li><li>Few-Shot Object Detection (<a href=\"https:\/\/github.com\/ucbdrive\/few-shot-object-detection\" target=\"_blank\" rel=\"noreferrer noopener\">FsDet<\/a>)\uff1a\u5305\u542b\u57fa\u4e8e\u5c0f\u6837\u672c\u7684\u7269\u4f53\u68c0\u6d4b\u201c<a href=\"https:\/\/arxiv.org\/abs\/2003.06957\" target=\"_blank\" rel=\"noreferrer noopener\">Simple Few-Shot Object Detection<\/a>\u201d\u7684\u5b98\u65b9\u5b9e\u73b0\u3002<\/li><li><a href=\"https:\/\/github.com\/cnielly\/prototypical-networks-omniglot\" target=\"_blank\" rel=\"noreferrer noopener\">Prototypical Networks on the Omniglot Dataset<\/a>\uff1a&nbsp;\u201cPrototypical Networks for Few-shot Learning\u201d&nbsp;\u7684Pytorch\u5b9e\u73b0\u3002<\/li><\/ul>\n\n\n\n<h2 class=\"has-light-pink-background-color has-background\" id=\"%E6%9C%BA%E5%99%A8%E5%AD%A6%E4%B9%A0%E7%9A%84%E6%9C%AA%E6%9D%A5\"><a><\/a>\u673a\u5668\u5b66\u4e60\u7684\u672a\u6765<\/h2>\n\n\n\n<p><a rel=\"noreferrer noopener\" href=\"https:\/\/www.research.ibm.com\/haifa\/dept\/imt\/ist_dm.shtml\" 
target=\"_blank\">IBM<\/a>\u7814\u7a76\u8868\u660e\uff0c\u673a\u5668\u5b66\u4e60\u5728\u672a\u6765\u5c06\u56f4\u7ed5\u4ee5\u4e0b\u9886\u57df\u53d1\u5c55\uff1a<\/p>\n\n\n\n<ul><li>\u7ecf\u5178\u673a\u5668\u5b66\u4e60\uff1a\u4e00\u6b21\u5904\u7406\u4e00\u4e2a\u6570\u636e\u96c6\u3001\u4e00\u4e2a\u4efb\u52a1\u548c\u4e00\u4e2a\u7e41\u91cd\u8bad\u7ec3\u7684\u95ee\u9898<\/li><li>\u57fa\u4e8e\u5c0f\u6837\u672c\u7684\u673a\u5668\u5b66\u4e60\uff1a\u5904\u7406\u5927\u91cf\u7684\u79bb\u7ebf\u8bad\u7ec3\uff0c\u7136\u540e\u5728\u7c7b\u4f3c\u7684\u4efb\u52a1\u4e0a\u8f7b\u677e\u5b66\u4e60<\/li><li>\u53d1\u5c55\u4e2d\u7684\u673a\u5668\u5b66\u4e60\uff1a\u6301\u7eed\u5b66\u4e60\u5404\u79cd\u4efb\u52a1\u3002<\/li><\/ul>\n","protected":false},"excerpt":{"rendered":"<p>\u6458\u81ea: https:\/\/research.aimultiple.com\/few-shot-learning\/  &hellip; <a href=\"http:\/\/139.9.1.231\/index.php\/2022\/08\/09\/few-shot-learning-fsl\/\" class=\"more-link\">\u7ee7\u7eed\u9605\u8bfb<span class=\"screen-reader-text\">Few-Shot Learning (FSL): 
\u5c0f\u6837\u672c\u5b66\u4e60\u7b80\u4ecb\u53ca\u5176\u5e94\u7528<\/span><\/a><\/p>\n","protected":false},"author":1,"featured_media":0,"comment_status":"open","ping_status":"open","sticky":false,"template":"","format":"standard","meta":[],"categories":[19,4,12],"tags":[],"_links":{"self":[{"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/posts\/5388"}],"collection":[{"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/posts"}],"about":[{"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/types\/post"}],"author":[{"embeddable":true,"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/users\/1"}],"replies":[{"embeddable":true,"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/comments?post=5388"}],"version-history":[{"count":37,"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/posts\/5388\/revisions"}],"predecessor-version":[{"id":5455,"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/posts\/5388\/revisions\/5455"}],"wp:attachment":[{"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/media?parent=5388"}],"wp:term":[{"taxonomy":"category","embeddable":true,"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/categories?post=5388"},{"taxonomy":"post_tag","embeddable":true,"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/tags?post=5388"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}