{"id":6683,"date":"2022-08-30T14:41:00","date_gmt":"2022-08-30T06:41:00","guid":{"rendered":"http:\/\/139.9.1.231\/?p=6683"},"modified":"2022-10-21T17:22:22","modified_gmt":"2022-10-21T09:22:22","slug":"deeplab-v3-2","status":"publish","type":"post","link":"http:\/\/139.9.1.231\/index.php\/2022\/08\/30\/deeplab-v3-2\/","title":{"rendered":"Deeplab v3"},"content":{"rendered":"\n<p>\u8bba\u6587\u5730\u5740\uff1a <a href=\"https:\/\/arxiv.org\/abs\/1706.05587\" target=\"_blank\" rel=\"noreferrer noopener\">https:\/\/arxiv.org\/abs\/1706.05587<\/a><\/p>\n\n\n\n<figure class=\"wp-block-image size-full\"><img loading=\"lazy\" width=\"754\" height=\"167\" src=\"http:\/\/139.9.1.231\/wp-content\/uploads\/2022\/08\/image-449.png\" alt=\"\" class=\"wp-image-6688\" srcset=\"http:\/\/139.9.1.231\/wp-content\/uploads\/2022\/08\/image-449.png 754w, http:\/\/139.9.1.231\/wp-content\/uploads\/2022\/08\/image-449-300x66.png 300w\" sizes=\"(max-width: 754px) 100vw, 754px\" \/><\/figure>\n\n\n\n<p>Deeplab v3\u662fv2\u7248\u672c\u7684\u8fdb\u4e00\u6b65\u5347\u7ea7\uff0c\u4f5c\u8005\u4eec\u5728\u5bf9\u7a7a\u6d1e\u5377\u79ef\u91cd\u65b0\u601d\u8003\u7684\u57fa\u7840\u4e0a\uff0c\u8fdb\u4e00\u6b65\u5bf9Deeplab\u7cfb\u5217\u7684\u57fa\u672c\u6846\u67b6\u8fdb\u884c\u4e86\u4f18\u5316\uff0c\u53bb\u6389\u4e86v1\u548cv2\u7248\u672c\u4e2d\u4e00\u76f4\u575a\u6301\u7684CRF\u540e\u5904\u7406\u6a21\u5757\uff0c\u5347\u7ea7\u4e86\u4e3b\u5e72\u7f51\u7edc\u548cASPP\u6a21\u5757\uff0c\u4f7f\u5f97\u7f51\u7edc\u80fd\u591f\u66f4\u597d\u5730\u5904\u7406\u8bed\u4e49\u5206\u5272\u4e2d\u7684\u591a\u5c3a\u5ea6\u95ee\u9898\u3002\u63d0\u51faDeeplab v3\u7684\u8bba\u6587\u4e3aRethinking Atrous Convolution for Semantic Image Segmentation\uff0c\u662fDeeplab\u7cfb\u5217\u540e\u671f\u7f51\u7edc\u7684\u4ee3\u8868\u6a21\u578b\u3002<\/p>\n\n\n\n<p>DeepLab 
V3\u7684\u6539\u8fdb\u4e3b\u8981\u5305\u62ec\u4ee5\u4e0b\u51e0\u65b9\u9762\uff1a<\/p>\n\n\n\n<p>1\uff09\u63d0\u51fa\u4e86\u66f4\u901a\u7528\u7684\u6846\u67b6\uff0c\u9002\u7528\u4e8e\u4efb\u4f55\u7f51\u7edc<\/p>\n\n\n\n<p>2\uff09\u590d\u5236\u4e86ResNet\u6700\u540e\u7684block\uff0c\u5e76\u7ea7\u8054\u8d77\u6765<\/p>\n\n\n\n<p>3\uff09\u5728ASPP\u4e2d\u4f7f\u7528BN\u5c42<\/p>\n\n\n\n<p>4\uff09\u53bb\u6389\u4e86CRF<\/p>\n\n\n\n<p>\u968f\u7740\u8bed\u4e49\u5206\u5272\u7684\u53d1\u5c55\uff0c\u9010\u6e10\u6709\u4e24\u5927\u95ee\u9898\u4e9f\u5f85\u89e3\u51b3\uff1a\u4e00\u4e2a\u662f\u8fde\u7eed\u7684\u6c60\u5316\u548c\u5377\u79ef\u6b65\u957f\u5bfc\u81f4\u7684\u4e0b\u91c7\u6837\u56fe\u50cf\u4fe1\u606f\u4e22\u5931\u95ee\u9898\uff0c\u8fd9\u4e2a\u95ee\u9898\u5df2\u7ecf\u901a\u8fc7\u7a7a\u6d1e\u5377\u79ef\u6269\u5927\u611f\u53d7\u91ce\u5f97\u5230\u6bd4\u8f83\u597d\u7684\u5904\u7406\uff1b\u53e6\u5916\u4e00\u4e2a\u5219\u662f\u591a\u5c3a\u5ea6\u548c\u5229\u7528\u4e0a\u4e0b\u6587\u4fe1\u606f\u95ee\u9898\u3002\u8bba\u6587\u4e2d\u5206\u522b\u56de\u987e\u4e86\u56db\u79cd\u57fa\u4e8e\u591a\u5c3a\u5ea6\u548c\u4e0a\u4e0b\u6587\u4fe1\u606f\u8fdb\u884c\u8bed\u4e49\u5206\u5272\u7684\u65b9\u6cd5\uff0c\u5982\u56fe1\u6240\u793a\uff0c\u5305\u62ec\u56fe\u50cf\u91d1\u5b57\u5854\u3001\u7f16\u89e3\u7801\u67b6\u6784\u3001\u6df1\u5ea6\u7a7a\u6d1e\u5377\u79ef\u7f51\u7edc\u4ee5\u53ca\u7a7a\u95f4\u91d1\u5b57\u5854\u6c60\u5316\uff0c\u8fd9\u56db\u79cd\u65b9\u6cd5\u5404\u6709\u4f18\u7f3a\u70b9\uff0cASPP\u53ef\u4ee5\u7b97\u662f\u6df1\u5ea6\u7a7a\u6d1e\u5377\u79ef\u548c\u7a7a\u95f4\u91d1\u5b57\u5854\u6c60\u5316\u7684\u4e00\u79cd\u7ed3\u5408\uff0cDeeplab v3\u5728v2\u7684ASPP\u57fa\u7840\u4e0a\uff0c\u8fdb\u4e00\u6b65\u63a2\u7d22\u4e86\u7a7a\u6d1e\u5377\u79ef\u5728\u591a\u5c3a\u5ea6\u548c\u4e0a\u4e0b\u6587\u4fe1\u606f\u4e2d\u7684\u4f5c\u7528\u3002<\/p>\n\n\n\n<figure class=\"wp-block-image size-full\"><img loading=\"lazy\" width=\"943\" height=\"267\" 
src=\"http:\/\/139.9.1.231\/wp-content\/uploads\/2022\/08\/image-450.png\" alt=\"\" class=\"wp-image-6689\" srcset=\"http:\/\/139.9.1.231\/wp-content\/uploads\/2022\/08\/image-450.png 943w, http:\/\/139.9.1.231\/wp-content\/uploads\/2022\/08\/image-450-300x85.png 300w, http:\/\/139.9.1.231\/wp-content\/uploads\/2022\/08\/image-450-768x217.png 768w\" sizes=\"(max-width: 943px) 100vw, 943px\" \/><\/figure>\n\n\n\n<p>Deeplab v3\u53ef\u4f5c\u4e3a\u901a\u7528\u6846\u67b6\u878d\u5165\u5230\u4efb\u610f\u7f51\u7edc\u7ed3\u6784\u4e2d\uff0c\u5177\u4f53\u5730\uff0c\u4ee5\u4e32\u884c\u65b9\u5f0f\u8bbe\u8ba1\u7a7a\u6d1e\u5377\u79ef\u6a21\u5757\uff0c\u590d\u5236ResNet\u7684\u6700\u540e\u4e00\u4e2a\u5377\u79ef\u5757\uff0c\u5e76\u5c06\u590d\u5236\u540e\u7684\u5377\u79ef\u5757\u4ee5\u4e32\u884c\u65b9\u5f0f\u8fdb\u884c\u7ea7\u8054\uff0c\u5982\u56fe2\u6240\u793a\u3002DeepLab V3\u5c06\u7a7a\u6d1e\u5377\u79ef\u5e94\u7528\u5728\u7ea7\u8054\u6a21\u5757\u3002\u5177\u4f53\u6765\u8bf4\uff0c\u6211\u4eec\u53d6ResNet\u4e2d\u6700\u540e\u4e00\u4e2ablock\uff0c\u5728\u4e0b\u56fe\u4e2d\u4e3ablock4\uff0c\u5e76\u5728\u5176\u540e\u9762\u589e\u52a0\u7ea7\u8054\u6a21\u5757\u3002<\/p>\n\n\n\n<figure class=\"wp-block-image size-full\"><img loading=\"lazy\" width=\"890\" height=\"348\" src=\"http:\/\/139.9.1.231\/wp-content\/uploads\/2022\/08\/image-451.png\" alt=\"\" class=\"wp-image-6690\" srcset=\"http:\/\/139.9.1.231\/wp-content\/uploads\/2022\/08\/image-451.png 890w, http:\/\/139.9.1.231\/wp-content\/uploads\/2022\/08\/image-451-300x117.png 300w, http:\/\/139.9.1.231\/wp-content\/uploads\/2022\/08\/image-451-768x300.png 768w\" sizes=\"(max-width: 890px) 100vw, 890px\" \/><\/figure>\n\n\n\n<p>\u5728\u5377\u79ef\u5757\u4e32\u884c\u7ea7\u8054\u57fa\u7840\u4e0a\uff0cDeeplab v3\u53c8\u5bf9ASPP\u6a21\u5757\u8fdb\u884c\u5e76\u884c\u7ea7\u8054\uff0cv3\u5bf9ASPP\u6a21\u5757\u8fdb\u884c\u4e86\u5347\u7ea7\uff0c\u76f8\u8f83\u4e8ev2\u7248\u672c\u52a0\u5165\u4e86\u6279\u5f52\u4e00\u5316\uff08Batch 
Normalization\uff0cBN\uff09\uff0c\u901a\u8fc7\u7ec4\u7ec7\u4e0d\u540c\u7684\u7a7a\u6d1e\u6269\u5f20\u7387\u7684\u5377\u79ef\u5757\uff0c\u540c\u65f6\u52a0\u5165\u56fe\u50cf\u7ea7\u7279\u5f81\uff0c\u80fd\u591f\u66f4\u597d\u5730\u6355\u6349\u591a\u5c3a\u5ea6\u4e0a\u4e0b\u6587\u4fe1\u606f\uff0c\u5e76\u4e14\u4e5f\u80fd\u591f\u66f4\u5bb9\u6613\u8bad\u7ec3\uff0c\u5982\u56fe3\u6240\u793a\u3002<\/p>\n\n\n\n<p>1\uff09ASPP\u4e2d\u5e94\u7528\u4e86BN\u5c42<\/p>\n\n\n\n<p>2\uff09\u968f\u7740\u91c7\u6837\u7387\u7684\u589e\u52a0\uff0c\u6ee4\u6ce2\u5668\u4e2d\u6709\u6548\u7684\u6743\u91cd\u51cf\u5c11\u4e86(\u6709\u6548\u6743\u91cd\u51cf\u5c11\uff0c\u96be\u4ee5\u6355\u83b7\u8fdc\u8ddd\u79bb\u4fe1\u606f\uff0c\u8fd9\u8981\u6c42\u5408\u7406\u63a7\u5236\u91c7\u6837\u7387\u7684\u8bbe\u7f6e)<\/p>\n\n\n\n<p>3\uff09\u4f7f\u7528\u6a21\u578b\u6700\u540e\u7684\u7279\u5f81\u6620\u5c04\u7684\u5168\u5c40\u5e73\u5747\u6c60\u5316(\u4e3a\u4e86\u514b\u670d\u8fdc\u8ddd\u79bb\u4e0b\u6709\u6548\u6743\u91cd\u51cf\u5c11\u7684\u95ee\u9898)<\/p>\n\n\n\n<figure class=\"wp-block-image size-full\"><img loading=\"lazy\" width=\"833\" height=\"201\" src=\"http:\/\/139.9.1.231\/wp-content\/uploads\/2022\/08\/image-452.png\" alt=\"\" class=\"wp-image-6691\" srcset=\"http:\/\/139.9.1.231\/wp-content\/uploads\/2022\/08\/image-452.png 833w, http:\/\/139.9.1.231\/wp-content\/uploads\/2022\/08\/image-452-300x72.png 300w, http:\/\/139.9.1.231\/wp-content\/uploads\/2022\/08\/image-452-768x185.png 768w\" sizes=\"(max-width: 833px) 100vw, 833px\" \/><\/figure>\n\n\n\n<p>\u603b\u7ed3\u6765\u770b\uff0cDeeplab 
v3\u66f4\u5145\u5206\u7684\u5229\u7528\u4e86\u7a7a\u6d1e\u5377\u79ef\u6765\u83b7\u53d6\u5927\u8303\u56f4\u7684\u56fe\u50cf\u4e0a\u4e0b\u6587\u4fe1\u606f\u3002\u5177\u4f53\u5305\u62ec\uff1a\u591a\u5c3a\u5ea6\u4fe1\u606f\u7f16\u7801\u3001\u5e26\u6709\u9010\u6b65\u7ffb\u500d\u7684\u7a7a\u6d1e\u6269\u5f20\u7387\u7684\u7ea7\u8054\u6a21\u5757\u4ee5\u53ca\u5e26\u6709\u56fe\u50cf\u7ea7\u7279\u5f81\u7684ASPP\u6a21\u5757\u3002\u5b9e\u9a8c\u7ed3\u679c\u8868\u660e\uff0c\u8be5\u6a21\u578b\u5728 PASCAL VOC\u6570\u636e\u96c6\u4e0a\u76f8\u8f83\u4e8ev2\u7248\u672c\u6709\u4e86\u663e\u7740\u8fdb\u6b65\uff0c\u53d6\u5f97\u4e86\u5f53\u65f6SOTA\u7cbe\u5ea6\u6c34\u5e73\u3002<\/p>\n\n\n\n<p>Deeplab v3\u7684PyTorch\u5b9e\u73b0\u53ef\u53c2\u8003\uff1a<\/p>\n\n\n\n<p><a href=\"https:\/\/github.com\/pytorch\/vision\/blob\/main\/torchvision\/models\/segmentation\/deeplabv3.py\">https:\/\/github.com\/pytorch\/vision\/blob\/main\/torchvision\/models\/segmentation\/deeplabv3.py<\/a><\/p>\n\n\n\n<p>\u4ee3\u7801\u5b9e\u73b0\uff1a<\/p>\n\n\n\n<pre class=\"wp-block-code\"><code>class DeeplabV3(ResNet):\n\n    def __init__(self, n_class, block, layers, pyramids, grids, output_stride=16):\n        self.inplanes = 64\n        super(DeeplabV3, self).__init__()\n        if output_stride == 16:\n            strides = &#91;1, 2, 2, 1]\n            rates = &#91;1, 1, 1, 2]\n        elif output_stride == 8:\n            strides = &#91;1, 2, 1, 1]\n            rates = &#91;1, 1, 2, 2]\n        else:\n            raise NotImplementedError\n\n        # Backbone Modules\n        self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3,\n                               bias=False)\n        self.bn1 = nn.BatchNorm2d(64)\n        self.relu = nn.ReLU(inplace=True)\n        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)   # h\/4, w\/4\n\n        self.layer1 = self._make_layer(block, 64, layers&#91;0], stride=strides&#91;0], rate=rates&#91;0]) # h\/4, w\/4\n        self.layer2 = 
self._make_layer(block, 128, layers&#91;1], stride=strides&#91;1], rate=rates&#91;1]) # h\/8, w\/8\n        self.layer3 = self._make_layer(block, 256, layers&#91;2], stride=strides&#91;2], rate=rates&#91;2]) # h\/16,w\/16\n        self.layer4 = self._make_MG_unit(block, 512, blocks=grids, stride=strides&#91;3], rate=rates&#91;3])# h\/16,w\/16\n\n        # Deeplab Modules\n        self.aspp1 = ASPP_module(2048, 256, rate=pyramids&#91;0])  \n        self.aspp2 = ASPP_module(2048, 256, rate=pyramids&#91;1])\n        self.aspp3 = ASPP_module(2048, 256, rate=pyramids&#91;2])\n        self.aspp4 = ASPP_module(2048, 256, rate=pyramids&#91;3])\n\n        self.global_avg_pool = nn.Sequential(nn.AdaptiveAvgPool2d((1, 1)),\n                                             nn.Conv2d(2048, 256, kernel_size=1, stride=1, bias=False),\n                                             nn.BatchNorm2d(256),\n                                             nn.ReLU())\n\n        # get result features from the concat\n        self._conv1 = nn.Sequential(nn.Conv2d(1280, 256, kernel_size=1, stride=1, bias=False),\n                                    nn.BatchNorm2d(256),\n                                    nn.ReLU())\n\n        # generate the final logits\n        self._conv2 = nn.Conv2d(256, n_class, kernel_size=1, bias=False)\n\n        self.init_weight()\n\n    def forward(self, input):\n        x = self.conv1(input)\n        x = self.bn1(x)\n        x = self.relu(x)\n        x = self.maxpool(x)\n\n        x = self.layer1(x)\n        x = self.layer2(x)\n        x = self.layer3(x)\n        x = self.layer4(x)\n\n        x1 = self.aspp1(x)\n        x2 = self.aspp2(x)\n        x3 = self.aspp3(x)\n        x4 = self.aspp4(x)\n\n        # image-level features\n        x5 = self.global_avg_pool(x)\n        x5 = F.upsample(x5, size=x4.size()&#91;2:], mode='bilinear', align_corners=True)\n\n        x = torch.cat((x1, x2, x3, x4, x5), dim=1)\n\n        x = self._conv1(x)\n        x = self._conv2(x)\n\n      
  x = F.upsample(x, size=input.size()&#91;2:], mode='bilinear', align_corners=True)\n\n        return x<\/code><\/pre>\n\n\n\n<p>\u5176\u4e2d\u91cd\u8981\u7684_make<em>layer,\u00a0<\/em>_make_MG_unit\u548cASSP\u6a21\u5757\u5b9e\u73b0\u5982\u4e0b:<\/p>\n\n\n\n<pre class=\"wp-block-code\"><code>def _make_layer(self, block, planes, blocks, stride=1, rate=1):\n        downsample = None\n        if stride != 1 or self.inplanes != planes * block.expansion:\n            downsample = nn.Sequential(\n                nn.Conv2d(self.inplanes, planes * block.expansion,\n                          kernel_size=1, stride=stride, bias=False),\n                nn.BatchNorm2d(planes * block.expansion),\n            )\n\n        layers = &#91;]\n        layers.append(block(self.inplanes, planes, stride, rate, downsample))\n        self.inplanes = planes * block.expansion\n        for i in range(1, blocks):\n            layers.append(block(self.inplanes, planes))\n\n        return nn.Sequential(*layers)\n\n    def _make_MG_unit(self, block, planes, blocks=&#91;1, 2, 4], stride=1, rate=1):\n        downsample = None\n        if stride != 1 or self.inplanes != planes * block.expansion:\n            downsample = nn.Sequential(\n                nn.Conv2d(self.inplanes, planes * block.expansion,\n                          kernel_size=1, stride=stride, bias=False),\n                nn.BatchNorm2d(planes * block.expansion),\n            )\n\n        layers = &#91;]\n        layers.append(block(self.inplanes, planes, stride, rate=blocks&#91;0] * rate, downsample=downsample))\n        self.inplanes = planes * block.expansion\n        for i in range(1, len(blocks)):\n            layers.append(block(self.inplanes, planes, stride=1, rate=blocks&#91;i] * rate))\n\n        return nn.Sequential(*layers)\n\n\nclass ASPP_module(nn.Module):\n    def __init__(self, inplanes, planes, rate):\n        super(ASPP_module, self).__init__()\n        if rate == 1:\n            kernel_size = 1\n            
padding = 0\n        else:\n            kernel_size = 3\n            padding = rate\n        self.atrous_convolution = nn.Conv2d(inplanes, planes, kernel_size=kernel_size,\n                                            stride=1, padding=padding, dilation=rate, bias=False)\n        self.bn = nn.BatchNorm2d(planes)\n        self.relu = nn.ReLU()\n\n        self._init_weight()\n\n    def forward(self, x):\n        x = self.atrous_convolution(x)\n        x = self.bn(x)\n\n        return self.relu(x)\n\n    def _init_weight(self):\n        for m in self.modules():\n            if isinstance(m, nn.Conv2d):\n                torch.nn.init.kaiming_normal_(m.weight)\n            elif isinstance(m, nn.BatchNorm2d):\n                m.weight.data.fill_(1)\n                m.bias.data.zero_()<\/code><\/pre>\n\n\n\n<p>\u8bad\u7ec3\u7b56\u7565\uff1a<\/p>\n\n\n\n<p><strong>Crop size:<\/strong><\/p>\n\n\n\n<ul><li>\u4e3a\u4e86\u5927\u91c7\u6837\u7387\u7684\u7a7a\u6d1e\u5377\u79ef\u80fd\u591f\u6709\u6548\uff0c\u9700\u8981\u8f83\u5927\u7684\u56fe\u7247\u5927\u5c0f\uff1b\u5426\u5219\uff0c\u5927\u91c7\u6837\u7387\u7684\u7a7a\u6d1e\u5377\u79ef\u6743\u503c\u5c31\u4f1a\u4e3b\u8981\u7528\u4e8epadding\u533a\u57df\u3002<\/li><li>\u5728Pascal VOC 2012\u6570\u636e\u96c6\u7684\u8bad\u7ec3\u548c\u6d4b\u8bd5\u4e2d\u6211\u4eec\u91c7\u7528\u4e86513\u7684\u88c1\u526a\u5c3a\u5bf8\u3002<\/li><\/ul>\n\n\n\n<p><strong>Batch 
normalization\uff1a<\/strong><\/p>\n\n\n\n<ul><li>\u6211\u4eec\u5728ResNet\u4e4b\u4e0a\u6dfb\u52a0\u7684\u6a21\u5757\u90fd\u5305\u62ecBN\u5c42<\/li><li>\u5f53output_stride=16\u65f6\uff0c\u91c7\u7528batchsize=16\uff0c\u540c\u65f6BN\u5c42\u7684\u53c2\u6570\u505a\u53c2\u6570\u8870\u51cf0.9997\u3002<\/li><li>\u5728\u589e\u5f3a\u7684\u6570\u636e\u96c6\u4e0a\uff0c\u4ee5\u521d\u59cb\u5b66\u4e60\u73870.007\u8bad\u7ec330K\u540e\uff0c\u51bb\u7ed3BN\u5c42\u53c2\u6570\uff0c\u7136\u540e\u91c7\u7528output_stride=8\uff0c\u518d\u4f7f\u7528\u521d\u59cb\u5b66\u4e60\u73870.001\u5728PASCAL\u5b98\u65b9\u7684\u6570\u636e\u96c6\u4e0a\u8bad\u7ec330K\u3002<\/li><li>\u8bad\u7ec3output_stride=16\u6bd4output_stride=8\u8981\u5feb\u5f88\u591a\uff0c\u56e0\u4e3a\u5176\u4e2d\u95f4\u7684\u7279\u5f81\u6620\u5c04\u5728\u7a7a\u95f4\u4e0a\u5c0f\u56db\u500d\u3002\u4f46output_stride=16\u5728\u7279\u5f81\u6620\u5c04\u4e0a\u76f8\u5bf9\u7c97\u7cd9\uff0c\u5feb\u662f\u56e0\u4e3a\u727a\u7272\u4e86\u7cbe\u5ea6\u3002<\/li><\/ul>\n\n\n\n<p><strong>Upsampling logits:<\/strong><\/p>\n\n\n\n<ul><li>\u5728\u5148\u524d\u7684\u5de5\u4f5c\u4e0a\uff0c\u6211\u4eec\u662f\u5c06output_stride=8\u7684\u8f93\u51fa\u4e0eGround Truth\u4e0b\u91c7\u68378\u500d\u505a\u6bd4\u8f83\u3002<\/li><li>\u73b0\u5728\u6211\u4eec\u53d1\u73b0\u4fdd\u6301Ground Truth\u66f4\u91cd\u8981\uff0c\u6545\u6211\u4eec\u662f\u5c06\u6700\u7ec8\u7684\u8f93\u51fa\u4e0a\u91c7\u68378\u500d\u4e0e\u5b8c\u6574\u7684Ground Truth\u6bd4\u8f83\u3002<\/li><\/ul>\n\n\n\n<p><strong>Data augmentation:<\/strong><\/p>\n\n\n\n<p>\u5728\u8bad\u7ec3\u9636\u6bb5\uff0c\u968f\u673a\u7f29\u653e\u8f93\u5165\u56fe\u50cf(\u4ece0.5\u52302.0)\u548c\u968f\u673a\u5de6-\u53f3\u7ffb\u8f6c<\/p>\n","protected":false},"excerpt":{"rendered":"<p>\u8bba\u6587\u5730\u5740\uff1a https:\/\/arxiv.org\/abs\/1706.05587 Deeplab v3\u662fv2\u7248\u672c\u7684 &hellip; <a href=\"http:\/\/139.9.1.231\/index.php\/2022\/08\/30\/deeplab-v3-2\/\" class=\"more-link\">\u7ee7\u7eed\u9605\u8bfb<span 
class=\"screen-reader-text\">Deeplab v3<\/span><\/a><\/p>\n","protected":false},"author":1,"featured_media":0,"comment_status":"open","ping_status":"open","sticky":false,"template":"","format":"standard","meta":[],"categories":[24],"tags":[],"_links":{"self":[{"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/posts\/6683"}],"collection":[{"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/posts"}],"about":[{"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/types\/post"}],"author":[{"embeddable":true,"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/users\/1"}],"replies":[{"embeddable":true,"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/comments?post=6683"}],"version-history":[{"count":15,"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/posts\/6683\/revisions"}],"predecessor-version":[{"id":9235,"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/posts\/6683\/revisions\/9235"}],"wp:attachment":[{"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/media?parent=6683"}],"wp:term":[{"taxonomy":"category","embeddable":true,"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/categories?post=6683"},{"taxonomy":"post_tag","embeddable":true,"href":"http:\/\/139.9.1.231\/index.php\/wp-json\/wp\/v2\/tags?post=6683"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}