# NOTE(review): removed web-scrape chrome (page header, file-size line,
# commit hash, and line-number gutter) that made this file invalid Python.
import torch
import torch.nn as nn
from .. import SparseTensor
class SparseConv3d(nn.Module):
    """Sparse 3D convolution backed by ``torchsparse``.

    Wraps ``torchsparse.nn.Conv3d`` and maintains the bookkeeping fields
    carried on the project's ``SparseTensor`` wrapper: the logical
    ``(batch, channels)`` shape, the coordinate ``layout`` (kept only for
    stride-1 convs, which preserve coordinates), the ``_spatial_cache``,
    and the per-axis ``_scale``.
    """

    def __init__(self, in_channels, out_channels, kernel_size, stride=1, dilation=1, bias=True, indice_key=None):
        """
        Args:
            in_channels: number of input feature channels.
            out_channels: number of output feature channels.
            kernel_size: convolution kernel size (int or 3-tuple).
            stride: convolution stride (int or 3-tuple).
            dilation: kernel dilation.
            bias: whether the underlying conv has a bias term.
            indice_key: unused by the torchsparse backend; kept so the
                signature matches the spconv-style interface.
        """
        super(SparseConv3d, self).__init__()
        # Lazy import so this module can be loaded without torchsparse
        # installed. (The old `if 'torchsparse' not in globals()` guard was
        # dead code: a function-scope import binds a local name, never this
        # module's globals, so the guard was always true. Python caches
        # modules, so an unconditional import here is cheap.)
        import torchsparse
        self.conv = torchsparse.nn.Conv3d(in_channels, out_channels, kernel_size, stride, 0, dilation, bias)

    def forward(self, x: SparseTensor) -> SparseTensor:
        out = self.conv(x.data)
        new_shape = [x.shape[0], self.conv.out_channels]
        # A strided conv moves points to a coarser grid, so the input's
        # coordinate layout is only reusable when every stride is 1.
        out = SparseTensor(
            out,
            shape=torch.Size(new_shape),
            layout=x.layout if all(s == 1 for s in self.conv.stride) else None,
        )
        out._spatial_cache = x._spatial_cache
        # Downsampling multiplies the voxel scale per axis.
        out._scale = tuple(s * stride for s, stride in zip(x._scale, self.conv.stride))
        return out
class SparseInverseConv3d(nn.Module):
    """Sparse transposed (inverse) 3D convolution backed by ``torchsparse``.

    Mirrors :class:`SparseConv3d` but constructs the underlying
    ``torchsparse.nn.Conv3d`` with ``transposed=True`` and *divides* the
    per-axis ``_scale`` by the stride (upsampling) instead of multiplying.
    """

    def __init__(self, in_channels, out_channels, kernel_size, stride=1, dilation=1, bias=True, indice_key=None):
        """
        Args:
            in_channels: number of input feature channels.
            out_channels: number of output feature channels.
            kernel_size: convolution kernel size (int or 3-tuple).
            stride: convolution stride; the output grid is upsampled by
                this factor per axis.
            dilation: kernel dilation.
            bias: whether the underlying conv has a bias term.
            indice_key: unused by the torchsparse backend; kept so the
                signature matches the spconv-style interface.
        """
        super(SparseInverseConv3d, self).__init__()
        # Lazy import so this module can be loaded without torchsparse
        # installed. (The old `if 'torchsparse' not in globals()` guard was
        # dead code: a function-scope import binds a local name, never this
        # module's globals, so the guard was always true. Python caches
        # modules, so an unconditional import here is cheap.)
        import torchsparse
        self.conv = torchsparse.nn.Conv3d(in_channels, out_channels, kernel_size, stride, 0, dilation, bias, transposed=True)

    def forward(self, x: SparseTensor) -> SparseTensor:
        out = self.conv(x.data)
        new_shape = [x.shape[0], self.conv.out_channels]
        # A strided transposed conv changes the coordinate grid, so the
        # input's layout is only reusable when every stride is 1.
        out = SparseTensor(
            out,
            shape=torch.Size(new_shape),
            layout=x.layout if all(s == 1 for s in self.conv.stride) else None,
        )
        out._spatial_cache = x._spatial_cache
        # Upsampling divides the voxel scale per axis (integer division:
        # scales are assumed to stay integral — mirrors the original code).
        out._scale = tuple(s // stride for s, stride in zip(x._scale, self.conv.stride))
        return out