1. The BiFormer block that was previously introduced as an attention mechanism is used here to modify the backbone, as sketched below.
YOLOv10 Improvements | Attention Series | Introducing the BiFormer Attention Mechanism into YOLOv10
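As a rough illustration of what "using BiFormer for the backbone" means, the sketch below leaves an attention slot in a toy two-stage backbone that a BiFormer-style block (built from the core code in the next section) would fill. ToyBackbone, its layer layout, and the attention slot are hypothetical names for illustration only, not the article's actual YOLOv10 integration.

import torch
import torch.nn as nn

# Hypothetical toy backbone used only for illustration: two strided conv
# stages, with a slot after the second stage where a BiFormer-style
# attention block (operating on (B, C, H, W) feature maps) would be placed.
class ToyBackbone(nn.Module):
    def __init__(self, attn: nn.Module = None):
        super().__init__()
        self.stage1 = nn.Sequential(nn.Conv2d(3, 32, 3, 2, 1), nn.SiLU())
        self.stage2 = nn.Sequential(nn.Conv2d(32, 64, 3, 2, 1), nn.SiLU())
        # nn.Identity() is a placeholder; swap in a BiFormer block here
        self.attn = attn if attn is not None else nn.Identity()

    def forward(self, x):
        x = self.stage1(x)
        x = self.stage2(x)
        return self.attn(x)

if __name__ == "__main__":
    model = ToyBackbone()
    out = model(torch.randn(1, 3, 64, 64))
    print(out.shape)  # torch.Size([1, 64, 16, 16])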
2. Core Code
from collections import OrderedDict
from functools import partial
from typing import Optional, Union
import torch
import torch.nn as nn
import torch.nn.functional as F
from einops import rearrange
from einops.layers.torch import Rearrange
from timm.models import register_model
from timm.models.layers import DropPath, to_2tuple, trunc_normal_
from timm.models.vision_transformer import _cfg
from typing import Tuple
from torch import Tensor


class DWConv(nn.Module):
    def __init__(self, dim):
        super(DWConv, self).__init__()
        # depthwise 3x3 convolution: groups=dim applies one filter per channel
        self.dwconv = nn.Conv2d(dim, dim, 3, 1, 1, bias=True, groups=dim)

    def forward(self, x):
        # x: (B, C, H, W); spatial resolution is preserved
        return self.dwconv(x)

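A quick shape check for the DWConv block above (a minimal sketch, assuming the depthwise 3x3 convolution with groups=dim shown in the core code): channel count and spatial size are preserved.

import torch

# Assumes DWConv from the core code above is already defined in scope.
dw = DWConv(dim=64)
x = torch.randn(2, 64, 32, 32)   # (batch, channels, height, width)
y = dw(x)
print(y.shape)                   # torch.Size([2, 64, 32, 32])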