# Content loss for neural style transfer, using the Keras backend (K).
from keras import backend as K


def content_loss(base, combination):
    # Sum of squared differences between the activations, i.e. the squared
    # L2 norm of (combination - base).
    return K.sum(K.square(combination - base))


# `model` is assumed to be a pretrained convnet built elsewhere, fed a batch of
# three images: the target image (index 0), the style reference image (index 1)
# and the generated combination image (index 2).
outputs_dict = dict([(layer.name, layer.output) for layer in model.layers])
content_layer = 'block5_conv2'
style_layers = ['block1_conv1',
                'block2_conv1',
                'block3_conv1',
                'block4_conv1',
                'block5_conv1']
total_variation_weight = 1e-4
style_weight = 1.
content_weight = 0.025

# K here refers to the Keras backend.
loss = K.variable(0.)

# Add the content loss, computed on the content layer's activations for the
# target image and the combination image.
layer_features = outputs_dict[content_layer]
target_image_features = layer_features[0, :, :, :]
combination_features = layer_features[2, :, :, :]
loss += content_weight * content_loss(target_image_features,
                                       combination_features)
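
# --- Sketch only: how this accumulated loss is typically hooked up to gradients.
# `combination_image` is assumed to be the backend placeholder for the generated
# image (the third entry of the batch the model was built on); it is not defined
# in this paste. K.gradients and K.function are standard Keras backend calls.
grads = K.gradients(loss, combination_image)[0]
fetch_loss_and_grads = K.function([combination_image], [loss, grads])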