tensorflow::ops::Relu

#include <nn_ops.h>

Computes rectified linear: max(features, 0).
Summary

See: https://en.wikipedia.org/wiki/Rectifier_(neural_networks)

Example usage:

    tf.nn.relu([-2., 0., 3.]).numpy()
    array([0., 0., 3.], dtype=float32)
Args:
- scope: A Scope object
Returns:
- Output: The activations tensor.
Constructors and Destructors

| Constructor | Description |
|---|---|
| Relu(const ::tensorflow::Scope& scope, ::tensorflow::Input features) | |
Public attributes

| Attribute | Description |
|---|---|
| activations | |
| operation | |
Public functions

| Function | Return type |
|---|---|
| node() const | ::tensorflow::Node * |
| operator::tensorflow::Input() const | |
| operator::tensorflow::Output() const | |
Public attributes

Public functions

node

    ::tensorflow::Node * node() const

operator::tensorflow::Input

    operator::tensorflow::Input() const

operator::tensorflow::Output

    operator::tensorflow::Output() const