#192 include top by nwschurink · Pull Request #208 · lukemelas/EfficientNet-PyTorch

I have added the functionality requested in issues #192 and #207.
It adds an option, passed at model initialisation, that omits the final layers of the model, similar to how this is implemented in the Keras version of EfficientNet.

from efficientnet_pytorch import EfficientNet
from efficientnet_pytorch.utils import MemoryEfficientSwish
from torchsummary import summary
from torch import nn

# Demo of the new `include_top` flag: when False, EfficientNet stops after
# feature extraction (no final dropout / fully-connected head), mirroring the
# `include_top` argument of Keras' EfficientNet.

# A model with the final layers (default behaviour)
model = EfficientNet.from_name("efficientnet-b0", num_classes=2, include_top=True, in_channels=1)
model.to('cuda')
summary(model, input_size=(1, 100, 100))

# A model without the final layers (feature extractor only)
model = EfficientNet.from_name("efficientnet-b0", num_classes=2, include_top=False, in_channels=1)
model.to('cuda')
summary(model, input_size=(1, 100, 100))

# A custom model built on top of the feature-extraction part of EfficientNet.
# Flatten bridges the backbone output to the Linear layers (b0 emits 1280 channels).
model = EfficientNet.from_name("efficientnet-b0", num_classes=2, include_top=False, in_channels=1)
custom_model = nn.Sequential(model, nn.Dropout(0.2), nn.Flatten(), nn.Linear(1280, 100), nn.Linear(100, 2), MemoryEfficientSwish())
custom_model.to('cuda')
summary(custom_model, input_size=(1, 100, 100))

# A custom model with pre-trained feature-extraction layers.
# Fix: include nn.Flatten() (as in the previous example) so the Linear layer
# receives a 2-D tensor, and summarize `custom_model`, not the bare backbone.
model = EfficientNet.from_name("efficientnet-b0", num_classes=2, include_top=False, in_channels=1)
custom_model = nn.Sequential(model, nn.Dropout(0.2), nn.Flatten(), nn.Linear(1280, 100), nn.Linear(100, 2), MemoryEfficientSwish())
custom_model.to('cuda')
summary(custom_model, input_size=(1, 100, 100))