Commit

update

YunYang1994 authored and YunYang1994 committed Mar 8, 2019
1 parent fae836f commit 0dc040e
Showing 6 changed files with 1,702 additions and 0 deletions.
80 changes: 80 additions & 0 deletions 1-Introduction/.ipynb_checkpoints/activation-checkpoint.ipynb
@@ -0,0 +1,80 @@
(contents identical to 1-Introduction/activation.ipynb below; this checkpoint file is a Jupyter autosave copy)
80 changes: 80 additions & 0 deletions 1-Introduction/activation.ipynb
@@ -0,0 +1,80 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"<Figure size 800x600 with 4 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"import tensorflow as tf\n",
"import numpy as np\n",
"import matplotlib.pyplot as plt\n",
"\n",
"# fake data\n",
"x = np.linspace(-5, 5, 100)\n",
"\n",
"# following are popular activation functions\n",
"y_relu = tf.nn.relu(x)\n",
"y_sigmoid = tf.nn.sigmoid(x)\n",
"y_tanh = tf.nn.tanh(x)\n",
"y_softplus = tf.nn.softplus(x)\n",
"# y_softmax = tf.nn.softmax(x) softmax is a special kind of activation function, it is about probability\n",
"\n",
"# plt to visualize these activation function\n",
"plt.figure(1, figsize=(8, 6))\n",
"plt.subplot(221)\n",
"plt.plot(x, y_relu, c='red', label='relu')\n",
"plt.ylim((-1, 5))\n",
"plt.legend(loc='best')\n",
"\n",
"plt.subplot(222)\n",
"plt.plot(x, y_sigmoid, c='red', label='sigmoid')\n",
"plt.ylim((-0.2, 1.2))\n",
"plt.legend(loc='best')\n",
"\n",
"plt.subplot(223)\n",
"plt.plot(x, y_tanh, c='red', label='tanh')\n",
"plt.ylim((-1.2, 1.2))\n",
"plt.legend(loc='best')\n",
"\n",
"plt.subplot(224)\n",
"plt.plot(x, y_softplus, c='red', label='softplus')\n",
"plt.ylim((-0.2, 6))\n",
"plt.legend(loc='best')\n",
"\n",
"plt.show()"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.5.2"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
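For reference, the four activations plotted in this notebook have simple closed forms. A minimal NumPy-only sketch of the same curves (no TensorFlow needed; these one-line definitions are my own illustration, not part of the commit):

import numpy as np

x = np.linspace(-5, 5, 100)

y_relu     = np.maximum(0.0, x)          # max(0, x)
y_sigmoid  = 1.0 / (1.0 + np.exp(-x))    # 1 / (1 + e^-x), squashes into (0, 1)
y_tanh     = np.tanh(x)                  # squashes into (-1, 1)
y_softplus = np.log1p(np.exp(x))         # ln(1 + e^x), a smooth approximation of relu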
54 changes: 54 additions & 0 deletions 1-Introduction/activation.py
@@ -0,0 +1,54 @@
#! /usr/bin/env python
# coding=utf-8
#================================================================
# Copyright (C) 2019 * Ltd. All rights reserved.
#
# Editor : VIM
# File name : activation.py
# Author : YunYang1994
# Created date: 2019-03-08 22:05:51
# Description : plot some popular activation functions (relu, sigmoid, tanh, softplus)
#
#================================================================

import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt

# sample input data
x = np.linspace(-5, 5, 100)

# some popular activation functions; with eager execution (TF 2.x) these
# tensors convert to NumPy arrays automatically when matplotlib plots them
y_relu = tf.nn.relu(x)
y_sigmoid = tf.nn.sigmoid(x)
y_tanh = tf.nn.tanh(x)
y_softplus = tf.nn.softplus(x)
# y_softmax = tf.nn.softmax(x)  # softmax is special: it maps a vector of scores
# to a probability distribution rather than acting elementwise (see the sketch below)

# use matplotlib to visualize these activation functions
plt.figure(1, figsize=(8, 6))
plt.subplot(221)
plt.plot(x, y_relu, c='red', label='relu')
plt.ylim((-1, 5))
plt.legend(loc='best')

plt.subplot(222)
plt.plot(x, y_sigmoid, c='red', label='sigmoid')
plt.ylim((-0.2, 1.2))
plt.legend(loc='best')

plt.subplot(223)
plt.plot(x, y_tanh, c='red', label='tanh')
plt.ylim((-1.2, 1.2))
plt.legend(loc='best')

plt.subplot(224)
plt.plot(x, y_softplus, c='red', label='softplus')
plt.ylim((-0.2, 6))
plt.legend(loc='best')

plt.show()
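As the softmax comment in the script notes, softmax differs from the other activations: it normalizes a whole vector of scores into a probability distribution instead of transforming each value independently. A minimal sketch of that behavior (the softmax helper below is my own illustration, not code from this commit):

import numpy as np

def softmax(scores):
    e = np.exp(scores - np.max(scores))  # subtract the max for numerical stability
    return e / e.sum()                   # non-negative entries that sum to 1

p = softmax(np.array([1.0, 2.0, 3.0]))
print(p)        # approximately [0.090 0.245 0.665]
print(p.sum())  # 1.0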



