Skip to content

Commit

Permalink
Merge pull request #6 from KastusKalinovski/master
Browse files Browse the repository at this point in the history
Master
  • Loading branch information
zzmicer authored Feb 9, 2021
2 parents d5b4912 + 6fda29c commit b6fae56
Show file tree
Hide file tree
Showing 15 changed files with 441 additions and 276 deletions.
63 changes: 63 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]

# C extensions
*.so

# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.cache
nosetests.xml
coverage.xml

# Translations
*.mo
*.pot

# Django stuff:
*.log

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# IPython notebook
.ipynb_checkpoints

# Repo scratch directory
scratch/

# Misc
.DS_Store
269 changes: 0 additions & 269 deletions layers.py

This file was deleted.

7 changes: 0 additions & 7 deletions metrics.py

This file was deleted.

Empty file added src/__init__.py
Empty file.
File renamed without changes.
Empty file added src/layers/__init__.py
Empty file.
30 changes: 30 additions & 0 deletions src/layers/activations.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
import numpy as np
from src.utils import sigmoid

class Relu:
    '''Rectified Linear Unit activation: returns max(0, x) element-wise.'''

    def __init__(self):
        # ReLU has no trainable parameters.
        self.params = []

    def forward(self, batch):
        '''Cache the raw input (needed by backward) and apply ReLU.'''
        self.X = batch
        return np.maximum(self.X, 0)

    def backward(self, gradient):
        '''Pass the upstream gradient through where the input was positive.

        Returns a (gradient, param_grads) pair; param_grads is empty
        because ReLU has no parameters.
        '''
        inactive = self.X <= 0
        grad_out = gradient.copy()
        grad_out[inactive] = 0
        return grad_out, []


class Sigmoid:
    '''Sigmoid (logistic) activation function, applied element-wise.'''

    def __init__(self):
        # Sigmoid has no trainable parameters.
        self.params = []

    def forward(self, batch):
        '''Apply sigmoid element-wise and cache the output for backward.

        Bug fix: the original passed a *generator* to np.array
        (``np.array(sigmoid(x) for x in batch)``), which produces a 0-d
        object array wrapping the generator rather than the element-wise
        result. A list comprehension materializes the values first.
        '''
        # NOTE(review): assumes `sigmoid` (from src.utils) accepts a single
        # element of `batch` — applied per item, as the original intended.
        self.X = np.array([sigmoid(x) for x in batch])
        return self.X

    def backward(self, gradient):
        '''Chain rule using the cached output: d(sigmoid)/dx = s * (1 - s).

        Returns a (gradient, param_grads) pair; param_grads is empty
        because sigmoid has no parameters.
        '''
        flow_gradient = gradient * self.X * (1 - self.X)
        return flow_gradient, []
Loading

0 comments on commit b6fae56

Please sign in to comment.