From 46de867fddb64bd85ece664fee366bb926e199b5 Mon Sep 17 00:00:00 2001 From: Greg Date: Sun, 23 Feb 2020 18:53:50 -0600 Subject: [PATCH] help --- main.py | 6 +++--- models/alexnet.py | 8 +++++--- models/googlenet.py | 8 +++++--- models/model.py | 12 ++++++++++-- models/resnet.py | 8 +++++--- models/save_features.py | 5 +++++ models/train.py | 2 ++ 7 files changed, 35 insertions(+), 14 deletions(-) create mode 100644 models/save_features.py diff --git a/main.py b/main.py index d52b01b..b5da279 100644 --- a/main.py +++ b/main.py @@ -20,7 +20,7 @@ help='accuracy evaluation function') parser.add_argument('-epochs', default=50, required=False, help='number of epochs') -parser.add_argument('-pretrain', default=False, required=False, +parser.add_argument('-pretrain', default=0, required=False, help='pretrain on ImageNet?') parser.add_argument('-step_size', default=7, required=False, help='number of epochs between decreasing learning rate') @@ -50,7 +50,7 @@ "loss_fn": loss.create(args.loss_fn), "acc_fn": accuracy.create(args.acc_fn), "epochs": int(args.epochs), - "pretraining": bool(args.pretrain), + "pretrained": int(args.pretrain), "step_size": int(args.step_size), "feature_extracting": bool(args.feature_extracting), "learning_rate": float(args.lr), @@ -63,7 +63,7 @@ model_param["loss_fn"], model_param["acc_fn"], model_param["epochs"], - model_param["pretraining"], + model_param["pretrained"], model_param["step_size"], model_param["feature_extracting"], model_param["learning_rate"], diff --git a/models/alexnet.py b/models/alexnet.py index e86090a..78cef8d 100644 --- a/models/alexnet.py +++ b/models/alexnet.py @@ -1,12 +1,14 @@ from torchvision import models import torch from models.model import Model +from models.save_features import SaveFeatures class Alexnet(Model): - def __init__(self, loaders, loss_fn, acc_fn, epochs=20, pretraining=True, step_size=7, feature_extracting=False, lr=0.01, output_layers=256, name="Alexnet"): - alexnet = 
models.alexnet(pretrained=pretraining) + def __init__(self, loaders, loss_fn, acc_fn, epochs=20, pretrained=0, step_size=7, feature_extracting=False, lr=0.01, output_layers=256, name="Alexnet"): + alexnet = models.alexnet(pretrained=True) - super().__init__(loaders, alexnet, loss_fn, acc_fn, epochs, pretraining, step_size, feature_extracting, lr, output_layers, name=name) + super().__init__(loaders, alexnet, loss_fn, acc_fn, epochs, pretrained, step_size, feature_extracting, lr, output_layers, name=name) + self.activated_features = SaveFeatures(self.model.classifier[6]) def get_optimizer(self, lr): num_ftrs = self.model.classifier[6].in_features diff --git a/models/googlenet.py b/models/googlenet.py index 5c68ed6..4dc3443 100644 --- a/models/googlenet.py +++ b/models/googlenet.py @@ -1,12 +1,14 @@ from torchvision import models import torch from models.model import Model +from models.save_features import SaveFeatures class Googlenet(Model): - def __init__(self, loaders, loss_fn, acc_fn, epochs=20, pretraining=True, step_size=7, feature_extracting=False, lr=0.01, output_layers=256, name="Googlenet"): - googlenet = models.googlenet(pretrained=pretraining) + def __init__(self, loaders, loss_fn, acc_fn, epochs=20, pretrained=0, step_size=7, feature_extracting=False, lr=0.01, output_layers=256, name="Googlenet"): + googlenet = models.googlenet(pretrained=True) - super().__init__(loaders, googlenet, loss_fn, acc_fn, epochs, pretraining, step_size, feature_extracting, lr, output_layers, name=name) + super().__init__(loaders, googlenet, loss_fn, acc_fn, epochs, pretrained, step_size, feature_extracting, lr, output_layers, name=name) + # self.activated_features = SaveFeatures(self.model._modules.get('b5')) def get_optimizer(self, lr): num_ftrs = self.model.fc.in_features diff --git a/models/model.py b/models/model.py index ea3a603..69d7826 100644 --- a/models/model.py +++ b/models/model.py @@ -1,11 +1,13 @@ from torch.optim import lr_scheduler +import torch import time 
import logging from models.train import train_model +from models.save_features import SaveFeatures class Model(): - def __init__(self, dataloaders, model, loss_fn, acc_fn, epochs=20, pretraining=True, step_size=7, feature_extracting=False, lr=0.01, output_layers=256, name="model"): + def __init__(self, dataloaders, model, loss_fn, acc_fn, epochs=20, pretrained=0, step_size=7, feature_extracting=False, lr=0.01, output_layers=256, name="model"): self.epochs = epochs self.loss_fn = loss_fn @@ -23,6 +25,9 @@ def __init__(self, dataloaders, model, loss_fn, acc_fn, epochs=20, pretraining=T self.optimizer = self.get_optimizer(lr) self.scheduler = lr_scheduler.StepLR(self.optimizer, step_size=step_size, gamma=0.1) + # for m in self.model.modules(): + # self.init_params(m) + def train(self): start_time = time.time() train_model(self.loaders, self.model, self.loss_fn, self.acc_fn, self.optimizer, self.scheduler, self.epochs, name=self.name) @@ -30,4 +35,7 @@ def train(self): def get_optimizer(self, lr): return optim.SGD(self.model.parameters(), lr=lr, momentum=0.9) - + + def init_params(self, m): + if type(m)==torch.nn.Linear or type(m)==torch.nn.Conv2d: + m.weight.data=torch.randn(m.weight.size())*.01#Random weight initialisation diff --git a/models/resnet.py b/models/resnet.py index 0c221c1..5795a28 100644 --- a/models/resnet.py +++ b/models/resnet.py @@ -1,12 +1,14 @@ from torchvision import models import torch from models.model import Model +from models.save_features import SaveFeatures class Resnet(Model): - def __init__(self, loaders, loss_fn, acc_fn, epochs=20, pretraining=True, step_size=7, feature_extracting=False, lr=0.01, output_layers=256, name="Resnet"): - resnet = models.resnet18(pretrained=pretraining) + def __init__(self, loaders, loss_fn, acc_fn, epochs=20, pretrained=0, step_size=7, feature_extracting=False, lr=0.01, output_layers=256, name="Resnet"): + resnet = models.resnet18(pretrained=True) - super().__init__(loaders, resnet, loss_fn, acc_fn, epochs, 
pretraining, step_size, feature_extracting, lr, output_layers, name=name) + super().__init__(loaders, resnet, loss_fn, acc_fn, epochs, pretrained, step_size, feature_extracting, lr, output_layers, name=name) + self.activated_features = SaveFeatures(self.model._modules.get('layer4')) def get_optimizer(self, lr): num_ftrs = self.model.fc.in_features diff --git a/models/save_features.py b/models/save_features.py new file mode 100644 index 0000000..dffc14e --- /dev/null +++ b/models/save_features.py @@ -0,0 +1,5 @@ +class SaveFeatures(): + features=None + def __init__(self, m): self.hook = m.register_forward_hook(self.hook_fn) + def hook_fn(self, module, input, output): self.features = ((output.cpu()).data).numpy() + def remove(self): self.hook.remove() \ No newline at end of file diff --git a/models/train.py b/models/train.py index b0ac998..43bd6ee 100644 --- a/models/train.py +++ b/models/train.py @@ -49,6 +49,8 @@ def train_model(dataloaders, model, criterion, acc_fn, optimizer, scheduler, num images, labels = data outputs = model(torch.stack(images).to(device)) + if model.__class__.__name__ == "GoogLeNet": + outputs = outputs.logits labels = torch.IntTensor(labels) loss = criterion(outputs, labels)