| """ | |
| Created on Thu Oct 26 11:23:47 2017 | |
| Original Author: | |
| @author: Utku Ozbulak - github.com/utkuozbulak | |
| Changes for ResNet Compatibility: | |
| Moritz Freidank - github.com/MFreidank | |
| """ | |
| import torch | |
| from torch.nn import ReLU | |
| from misc_functions import (get_params, | |
| convert_to_grayscale, | |
| save_gradient_images, | |
| get_positive_negative_saliency) | |
| class GuidedBackprop(): | |
| """ | |
| Produces gradients generated with guided back propagation from the given image | |
| """ | |
| def __init__(self, model): | |
| self.model = model | |
| self.gradients = None | |
| # Put model in evaluation mode | |
| self.model.eval() | |
| self.update_relus() | |
| self.hook_layers() | |
| def hook_layers(self): | |
| def hook_function(module, grad_in, grad_out): | |
| self.gradients = grad_in[0] | |
| # Register hook to the first layer | |
| first_layer = list(self.model.children())[0] | |
| first_layer.register_backward_hook(hook_function) | |
| def update_relus(self): | |
| """ | |
| Updates relu activation functions so that it only returns positive gradients | |
| """ | |
| def relu_hook_function(module, grad_in, grad_out): | |
| """ | |
| If there is a negative gradient, changes it to zero | |
| """ | |
| if isinstance(module, ReLU): | |
| return (torch.clamp(grad_in[0], min=0.0),) | |
| # Loop through layers, hook up ReLUs with relu_hook_function | |
| for module in self.model.modules(): | |
| if isinstance(module, ReLU): | |
| module.register_backward_hook(relu_hook_function) | |
| def generate_gradients(self, input_image, target_class): | |
| # Forward pass | |
| model_output = self.model(input_image) | |
| # Zero gradients | |
| self.model.zero_grad() | |
| # Target for backprop | |
| one_hot_output = torch.FloatTensor(1, model_output.size()[-1]).zero_() | |
| one_hot_output[0][target_class] = 1 | |
| # Backward pass | |
| model_output.backward(gradient=one_hot_output) | |
| # Convert Pytorch variable to numpy array | |
| # [0] to get rid of the first channel (1,3,224,224) | |
| gradients_as_arr = self.gradients.data.numpy()[0] | |
| return gradients_as_arr | |
| if __name__ == '__main__': | |
| target_example = 0 # Snake | |
| (original_image, prep_img, target_class, file_name_to_export, pretrained_model) =\ | |
| get_params(target_example) | |
| # Guided backprop | |
| GBP = GuidedBackprop(pretrained_model) | |
| # Get gradients | |
| guided_grads = GBP.generate_gradients(prep_img, target_class) | |
| # Save colored gradients | |
| save_gradient_images(guided_grads, file_name_to_export + '_Guided_BP_color') | |
| # Convert to grayscale | |
| grayscale_guided_grads = convert_to_grayscale(guided_grads) | |
| # Save grayscale gradients | |
| save_gradient_images(grayscale_guided_grads, file_name_to_export + '_Guided_BP_gray') | |
| # Positive and negative saliency maps | |
| pos_sal, neg_sal = get_positive_negative_saliency(guided_grads) | |
| save_gradient_images(pos_sal, file_name_to_export + '_pos_sal') | |
| save_gradient_images(neg_sal, file_name_to_export + '_neg_sal') | |
| print('Guided backprop completed') |
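For readers trying the class outside the linked repository, here is a minimal sketch (not part of the gist) of how it might be driven with a plain torchvision `resnet18`. The preprocessing values are the standard ImageNet statistics, the image path and target class index are placeholders, and the `requires_grad_` call anticipates the fix discussed in the comments below; this is written against the torchvision API of the time (`pretrained=True`).

```python
# Illustrative only: drives GuidedBackprop with torchvision's resnet18
# instead of get_params() from misc_functions. Path and class index are examples.
from PIL import Image
from torchvision import models, transforms

preprocess = transforms.Compose([
    transforms.Resize(256),
    transforms.CenterCrop(224),
    transforms.ToTensor(),
    transforms.Normalize(mean=[0.485, 0.456, 0.406],   # ImageNet mean
                         std=[0.229, 0.224, 0.225]),    # ImageNet std
])

img = Image.open('input_images/snake.jpg')    # placeholder path
prep_img = preprocess(img).unsqueeze(0)       # shape (1, 3, 224, 224)
prep_img.requires_grad_(True)                 # let gradients reach the input

model = models.resnet18(pretrained=True)
GBP = GuidedBackprop(model)
guided_grads = GBP.generate_gradients(prep_img, target_class=56)  # example ImageNet class index
```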
Hi, thanks for the changes. I tried with ResNet34 but I got this error:
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-19-abab40b06bf1> in <module>
----> 1 guided_grads = GBP.generate_gradients(x, y)
2 guided_grads.shape
/mnt/sdh/adam/test/guided_backprop.py in generate_gradients(self, input_image, target_class)
102 # Convert Pytorch variable to numpy array
103 # [0] to get rid of the first channel (1,3,224,224)
--> 104 gradients_as_arr = self.gradients.data.numpy()[0]
105 return gradients_as_arr
AttributeError: 'NoneType' object has no attribute 'data'
It seems the gradients weren't updated when calling the functions in the class. Have you encountered this issue with ResNet?
Yes, I am getting the same error as well.
You can solve the NoneType issue by running `input_image.requires_grad_(True)` before passing the input image through the model in `generate_gradients`.
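For clarity, a short sketch of where that one line would go in `generate_gradients`; only the `requires_grad_` call is new, the rest is unchanged from the gist:

```python
def generate_gradients(self, input_image, target_class):
    # Suggested fix: make PyTorch track gradients w.r.t. the input image,
    # otherwise grad_in[0] in the first-layer hook can be None for ResNets.
    input_image.requires_grad_(True)
    # Forward pass (the remainder of the method is identical to the gist)
    model_output = self.model(input_image)
    ...
```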
Hi,
I would like to run this code, but I get an error about missing files.
Could you please share the files
get_params,
convert_to_grayscale,
save_gradient_images,
get_positive_negative_saliency.
Thank you
@hamedbehzadi see the repository linked above. The gist is not stand-alone; it is a patch for that repository.
The file you are missing is: https://github.com/utkuozbulak/pytorch-cnn-visualizations/blob/master/src/misc_functions.py
Thank you @MFreidank.
The output of generate_gradients has more than 3 channels (e.g. [64, 13, 13]), so in the function save_gradient_images I get the error below:
TypeError: Cannot handle this data type: (1, 1, 13), |u1
Would you please help me solve this error?
@hamedbehzadi yes, I have the same error. The output has the same size as the output of the first layer; it seems the gradients are not calculated w.r.t. the input image.
@MFreidank any hints for this problem? Thanks!
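One possible workaround (an assumption on my part, not something confirmed in this thread) is to skip the first-layer hook entirely and read the gradient off the input tensor, which by construction has the input's shape. This sketch assumes `input_image` is a leaf tensor (e.g. produced directly by the preprocessing pipeline) and that a `GuidedBackprop(model)` instance has already been created, so the ReLU hooks from `update_relus` are registered on the model:

```python
import torch

def generate_guided_gradients(model, input_image, target_class):
    # Variant of generate_gradients that takes the gradient from the input
    # tensor itself rather than from the first layer's backward hook.
    input_image.requires_grad_(True)
    model_output = model(input_image)
    model.zero_grad()
    one_hot_output = torch.zeros(1, model_output.size()[-1])
    one_hot_output[0][target_class] = 1
    model_output.backward(gradient=one_hot_output)
    # input_image has shape (1, 3, 224, 224), so its gradient does too
    return input_image.grad.data.numpy()[0]
```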
This contains two minor changes in lines 36 and 50 to make `guided_backprop.py` from utkuozbulak/pytorch-cnn-visualizations#32 support `resnet18` (and above) models. The snake given as an example in the original repository looks like this when visualized: