Skip to content

Commit

Permalink
Documentation sprint feb2017 (gorgonia#89)
Browse files Browse the repository at this point in the history
  • Loading branch information
chewxy authored Feb 22, 2017
1 parent 5e2b649 commit f9a1a2d
Show file tree
Hide file tree
Showing 67 changed files with 934 additions and 741 deletions.
3 changes: 3 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,9 @@ There are very few dependencies that Gorgonia uses - and they're all pretty stab
|[gonum/matrix](http://github.com/gonum/matrix)|Compatibility between `Tensor` and Gonum's Matrix|Development of Gorgonia is committed to keeping up with the most updated version|[gonum license](https://github.com/gonum/license) (MIT/BSD-like)|
|[testify/assert](https://github.com/stretchr/testify)|Testing|Can do without but will be a massive pain in the ass to test|[testify licence](https://github.com/stretchr/testify/blob/master/LICENSE) (MIT/BSD-like)|

#Keeping Updated#

Gorgonia's project has a [mailing list](https://groups.google.com/forum/#!forum/gorgonia), as well as a [Twitter account](https://twitter.com/gorgoniaML). Official updates and announcements will be posted to those two sites.

#Usage#

Expand Down
2 changes: 1 addition & 1 deletion blas.go
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ func Use(b BLAS) {
// float32
}

// WhichBLAS() returns the BLAS that gorgonia uses.
// WhichBLAS returns the BLAS that gorgonia uses.
func WhichBLAS() BLAS { return whichblas }

func init() {
Expand Down
2 changes: 1 addition & 1 deletion broadcast.go
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ const (
// Do note that the current limitation of the BroadcastPattern allows only up to 4 dimensions per operand.
type BroadcastPattern byte

// helper function to create broadcast patterns
// NewBroadcastPattern is a helper function to create broadcast patterns
func NewBroadcastPattern(leftAxes, rightAxes []byte) BroadcastPattern {
var start byte
for _, a := range leftAxes {
Expand Down
5 changes: 5 additions & 0 deletions collections.go
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ func (ns Nodes) Swap(i, j int) { ns[i], ns[j] = ns[j], ns[i] }

// uses xtgo/set stuff

// Set returns a uniquified slice. It mutates the slice.
func (ns Nodes) Set() Nodes {
sort.Sort(ns)
size := set.Uniq(ns)
Expand All @@ -40,6 +41,7 @@ func (ns Nodes) Add(n *Node) Nodes {
return ns
}

// Contains checks if the wanted node is in the set
func (ns Nodes) Contains(want *Node) bool {
for _, n := range ns {
if n == want {
Expand Down Expand Up @@ -112,6 +114,7 @@ func (ns Nodes) Difference(other Nodes) Nodes {
return s[:count]
}

// Intersect performs an intersection with other Nodes
func (ns Nodes) Intersect(other Nodes) Nodes {
sort.Sort(ns)
sort.Sort(other)
Expand All @@ -120,6 +123,7 @@ func (ns Nodes) Intersect(other Nodes) Nodes {
return s[:count]
}

// AllSameGraph returns true if all the nodes in the slice belong to the same graph. Note that constants do not have to belong to the same graph.
func (ns Nodes) AllSameGraph() bool {
if len(ns) == 0 {
return false
Expand All @@ -141,6 +145,7 @@ func (ns Nodes) AllSameGraph() bool {
return true
}

// Equals returns true if two Nodes are the same
func (ns Nodes) Equals(other Nodes) bool {
if len(ns) != len(other) {
return false
Expand Down
18 changes: 5 additions & 13 deletions compile.go
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ import (
// This file deals with the compilation from a expression graph into a program
// that is executed by an interpreter

// Compile takes a graph and outputs a program suitable for *TapeMachine to run
// Compile takes a graph and outputs a program suitable for *tapeMachine to run
func Compile(g *ExprGraph) (prog *program, locMap map[*Node]register, err error) {
compileLogf("Compiling")
enterLoggingContext()
Expand All @@ -23,21 +23,11 @@ func Compile(g *ExprGraph) (prog *program, locMap map[*Node]register, err error)
return nil, nil, errors.Wrap(err, sortFail)
}

var inputs Nodes
for _, n := range g.leaves {
if n.isInput() {
inputs = append(inputs, n)
}
}

var outputs Nodes
for _, root := range g.Roots() {
outputs = append(outputs, root)
}
inputs := g.Inputs()

df := analyze(g, sortedNodes)

df.intervals = buildIntervals(sortedNodes)

ra := newRegalloc(df)
ra.alloc(sortedNodes)

Expand All @@ -55,6 +45,8 @@ func Compile(g *ExprGraph) (prog *program, locMap map[*Node]register, err error)
return
}

// CompileFunction takes a graph, subsets it based on the input and output nodes provided and outputs a program suitable for *tapeMachine to run.
// It is analogous to theano.Function().
func CompileFunction(g *ExprGraph, inputs, outputs Nodes) (prog *program, locMap map[*Node]register, err error) {
compileLogf("CompileFunctionNEW. Inputs: %d; outputs: %d", inputs, outputs)
enterLoggingContext()
Expand Down
2 changes: 1 addition & 1 deletion const.go
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ const (
binOpDoFail = "Failed to carry binOp.Do()"
binOpNodeFail = "Failed to carry binary operation %T"
applyFail = "Failed to carry Apply()"
binOpFail = "Binary operator recieved %d arguments"
binOpFail = "Binary operator received %d arguments"
hadamardProdFail = "Failed to carry hadamardProd()"
hadamardDivFail = "Failed to carry hadamardDiv()"
negFail = "Failed to carry Neg()"
Expand Down
2 changes: 1 addition & 1 deletion cuda.go
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ func (md ExternMetadata) ElemGridSize(n, dev int) (gridDimX, gridDimY, gridDimZ,
case blocks >= maxGridX*maxGridY:
gridDimX = maxGridX
gridDimY = maxGridY
gridDimY = calcBlocks(blocks%(maxGridX*maxGridY), maxGridZ)
gridDimZ = calcBlocks(blocks%(maxGridX*maxGridY), maxGridZ)
blockDimX = maxThreads
case blocks >= maxGridX:
gridDimX = maxGridX
Expand Down
8 changes: 6 additions & 2 deletions device.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,15 @@

package gorgonia

// Device represents the device where the code will be executed on. In this build, all code will run on the CPU
type Device int

const (
CPU Device = -1
CPU Device = -1 // CPU the only device the graph will be executed on
)

// String implements fmt.Stringer and runtime.Stringer
func (d Device) String() string { return "CPU" }
func (d Device) IsGPU() bool { return false }

// IsGPU will always return false in this build
func (d Device) IsGPU() bool { return false }
3 changes: 3 additions & 0 deletions device_cuda.go
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,11 @@ package gorgonia

import "github.com/chewxy/cu"

// Device represents the device where the code will be executed on. It can either be a GPU or CPU
type Device cu.Device

// CPU is the default device the graph will be executed on.
const CPU = Device(cu.CPU)

// String implements fmt.Stringer and runtime.Stringer
func (d Device) String() string { return cu.Device(d).String() }
10 changes: 10 additions & 0 deletions differentiation.go
Original file line number Diff line number Diff line change
Expand Up @@ -142,6 +142,16 @@ func backwardDiffAnalysis(wrt, sortedNodes Nodes) (retVal NodeSet, err error) {
return diffSet, nil
}

// Backpropagate backpropagates errors by performing reverse-mode symbolic differentiation, starting from the outputs, and working its way towards the inputs.
//
// This is the rough algorithm:
// 1. Filter out nodes that are unreachable
// 2. Forwards analysis, where a list of nodes affecting the output is added to consideration
// 3. Backwards analysis, where a list of nodes affected by differentiating the output are added to the consideration
// 4. If there is a difference in both sets, it will cause an error (both sets should be the same)
// 5. Traverse the graph from output towards input. On each visit, perform the symbolic differentiation
//
// For most cases, Grad() should be used instead of Backpropagate(), as Grad() performs several checks which would be the general use case, before calling Backpropagate()
func Backpropagate(outputs, gradOutputs, wrt Nodes) (retVal Nodes, err error) {
symdiffLogf("BACKPROP START")
symdiffLogf("Outputs: %d", outputs)
Expand Down
2 changes: 1 addition & 1 deletion doc.go
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
/*
Gorgonia is a library that helps facilitate machine learning in Go.
Package gorgonia is a library that helps facilitate machine learning in Go.
Write and evaluate mathematical equations involving multidimensional arrays easily.
Do differentiation with them just as easily.
*/
Expand Down
14 changes: 6 additions & 8 deletions dual.go
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ func (dv *dualValue) Clone() (retVal Value, err error) {
}

func (dv *dualValue) Type() hm.Type { return TypeOf(dv.Value) }
func (dv *dualValue) Dtype() tensor.Dtype { return DtypeOf(dv.Value) }
func (dv *dualValue) Dtype() tensor.Dtype { return dv.Value.Dtype() }

func (dv *dualValue) String() string {
return fmt.Sprintf("%#+v", dv.Value)
Expand Down Expand Up @@ -116,10 +116,10 @@ func variableDV(val Value) *dualValue {

switch v := val.(type) {
case Scalar:
retVal.d = one(DtypeOf(v))
retVal.d = one(v.Dtype())
case tensor.Tensor:
shp := v.Shape()
dt := DtypeOf(v)
dt := v.Dtype()
retVal.d = tensor.Ones(dt, shp...)
default:
panic(fmt.Sprintf("%v(%T) not handled yet", v, v))
Expand Down Expand Up @@ -177,9 +177,8 @@ func dvBind(op Op, inputs []*dualValue) (retVal *dualValue, err error) {
var ret Value
if ret, err = op.Do(vals...); err == nil {
return dvUnit(ret), nil
} else {
return nil, errors.Wrap(err, opDoFail)
}
return nil, errors.Wrap(err, opDoFail)
}

// dvBindVar returns a dvUnitVar instead of dvUnit (which zeroes the derivative).
Expand All @@ -190,9 +189,8 @@ func dvBindVar(op Op, inputs []*dualValue) (retVal *dualValue, err error) {
var ret Value
if ret, err = op.Do(vals...); err == nil {
return dvUnitVar(ret), nil
} else {
return nil, errors.Wrap(err, opDoFail)
}
return nil, errors.Wrap(err, opDoFail)
}

//TODO test vecvecdot divBind0
Expand Down Expand Up @@ -250,7 +248,7 @@ func dvBindVar0(op Op, retVal *dualValue, inputs []*dualValue) (err error) {

switch v := retVal.d.(type) {
case Scalar:
retVal.d = one(DtypeOf(v))
retVal.d = one(v.Dtype())
case tensor.Tensor:
switch v.Dtype() {
case tensor.Float64:
Expand Down
1 change: 0 additions & 1 deletion equalities.go
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,6 @@ func constEq(a, b constant) (ok bool) {
default:
panic("Not yet implemented")
}
panic("unreachable")
}

// fastest comparisons to least fastest
Expand Down
2 changes: 0 additions & 2 deletions formatter.go
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,6 @@ import (
"reflect"
)

var ()

type mapFmt struct {
m reflect.Value // map
}
Expand Down
11 changes: 5 additions & 6 deletions graph.go
Original file line number Diff line number Diff line change
Expand Up @@ -9,10 +9,8 @@ import (
"github.com/gonum/graph"
)

// a dag is just a holding structure for a directed acyclic graph (of expressions). It's based on the
// reference implementation of gonum's simple example.
//
// The main difference is that the dag is add-only, and there is no removal of nodes
// ExprGraph is a data structure for a directed acyclic graph (of expressions). This structure is the main entry point
// for Gorgonia.
type ExprGraph struct {
name string

Expand Down Expand Up @@ -191,7 +189,7 @@ func (g *ExprGraph) String() string {

// ToDot generates the graph in graphviz format. The use of this is to generate for the entire graph
// which may have multiple trees with different roots
// TODO: This is getting unwieldly. Perhaps refactor out into a ToDot(...Opt)?
// TODO: This is getting unwieldy. Perhaps refactor out into a ToDot(...Opt)?
func (g *ExprGraph) ToDot() string {
gv := gographviz.NewEscape()
gv.SetName(fullGraphName)
Expand Down Expand Up @@ -498,7 +496,7 @@ func (g *ExprGraph) subgraph(ns Nodes, opts ...Nodes) *ExprGraph {
return retVal
}

// Subgraph is a function with overloaded meanings. If only one node is passed in, it assumes that the one node is the root,
// Subgraph subsets a graph. This function has overloaded meanings - If only one node is passed in, it assumes that the one node is the root,
// otherwise, it treats ns as the subset of nodes to be included in the subgraph
func (g *ExprGraph) Subgraph(ns ...*Node) *ExprGraph {
if len(ns) == 1 {
Expand All @@ -507,6 +505,7 @@ func (g *ExprGraph) Subgraph(ns ...*Node) *ExprGraph {
return g.subgraph(ns)
}

// SubgraphRoots creates a subgraph, assuming the provided nodes are roots to the new subgraph.
func (g *ExprGraph) SubgraphRoots(ns ...*Node) *ExprGraph {
sub := make(Nodes, len(ns))
copy(sub, ns)
Expand Down
2 changes: 1 addition & 1 deletion nn.go
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ func BinaryXent(output, target *Node) (retVal *Node, err error) {
}

// Dropout is a convenience function to implement dropout.
// It randomly zeroes out a *Tensor with a probability drawn from
// It uses randomly zeroes out a *Tensor with a probability drawn from
// a uniform distribution
func Dropout(x *Node, prob float64) (retVal *Node, err error) {
if prob == 0.0 {
Expand Down
9 changes: 5 additions & 4 deletions node.go
Original file line number Diff line number Diff line change
Expand Up @@ -45,14 +45,14 @@ type Node struct {
hashed bool
inferredShape bool // is shape inferred?
unchanged bool // has this node been modified
isStmt bool // is this a statment node
isStmt bool // is this a statement node
ofInterest bool // is this node of particular interest? (for debugging)
}

// NodeConsOpt is a function that provides construction options for any Node.
type NodeConsOpt func(*Node)

// WithType is a node construcion option to set a node to the specified type.
// WithType is a node construction option to set a node to the specified type.
// Types in *Node are immutable once set. If the type has already been specified in the node,
// a check will be made to see if the both types are the same. If it isn't, it will panic.
func WithType(t hm.Type) NodeConsOpt {
Expand Down Expand Up @@ -364,6 +364,7 @@ func (n *Node) Name() string {
return buf.String()
}

// WriteHash writes the hash to the provided Hash32.
func (n *Node) WriteHash(h hash.Hash32) {
fmt.Fprintf(h, "%v%v", n.t, n.shape)

Expand All @@ -385,7 +386,7 @@ func (n *Node) WriteHash(h hash.Hash32) {

}

// Hashcode() provides the hash for the tree, assuming that the node is the root of the tree.
// Hashcode provides the hash for the tree, assuming that the node is the root of the tree.
// Original implementation was here by Vatine (who's apparently 80 years old and using SO!?!):
// http://stackoverflow.com/questions/1988665/hashing-a-tree-structure
func (n *Node) Hashcode() uint32 {
Expand Down Expand Up @@ -545,7 +546,7 @@ func (n *Node) bindCopy(v Value) (err error) {
if copied, err = Copy(n.boundTo, v); err != nil {
return errors.Wrapf(err, "Failed to copy while binding to node")
}
n.boundTo = v // in case it's a scalar
n.boundTo = copied // in case it's a scalar
return nil
}

Expand Down
6 changes: 4 additions & 2 deletions noextern.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,9 @@ package gorgonia
// CUDA indicates if this build is using CUDA
const CUDA = false

// ExternMetadata is used to hold metadata about external execution devices.
// In this build, it's an empty struct because the default build doesn't use external devices to execute the graph on
type ExternMetadata struct{}

func (m ExternMetadata) HasFunc(name string) bool { return false }
func (m ExternMetadata) Function(name string) (interface{}, error) { return nil, nil }
// HasFunc will always return false in this build
func (m ExternMetadata) HasFunc(name string) bool { return false }
3 changes: 2 additions & 1 deletion op.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ import (
"github.com/pkg/errors"
)

// DimSizer is any type (typically a tensor.Shape) that allows querying for a dimension size given an input dimension.
type DimSizer interface {
DimSize(int) (int, error)
}
Expand Down Expand Up @@ -126,7 +127,7 @@ type SDOp interface {
SymDiff(inputs Nodes, output, grad *Node) (retVal Nodes, err error)
}

// a ReductionOp changes the shape of the node
// ReductionOp changes the shape of the node
type ReductionOp interface {
Op

Expand Down
2 changes: 1 addition & 1 deletion op_math.go
Original file line number Diff line number Diff line change
Expand Up @@ -510,7 +510,7 @@ func (op elemUnaryOp) do(inputs []Value, opts ...tensor.FuncOpt) (retVal Value,
}
retVal = t
case Scalar:
vt := DtypeOf(v)
vt := v.Dtype()
switch vt {
case tensor.Float32:
vs := v.(*F32)
Expand Down
4 changes: 2 additions & 2 deletions op_math_cuda.go
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ func (op elemUnaryOp) CUDADo(extern External, fromDevs []Device, toDev Device, p
return op.do(inputs)
}

name := fmt.Sprintf("%v%d", op.CUDAFuncName(), int(DtypeOf(a).Size())*8)
name := fmt.Sprintf("%v%d", op.CUDAFuncName(), int(a.Dtype().Size())*8)
if !extern.HasFunc(name) {
cudaLogf("extern does not have func %q", name)
return op.Do(inputs...)
Expand Down Expand Up @@ -99,7 +99,7 @@ func (op elemBinOp) CUDADo(extern External, fromDevs []Device, toDev Device, pre
return op.Do(inputs...)
}

name := fmt.Sprintf("%v%d", op.CUDAFuncName(), int(DtypeOf(a).Size())*8)
name := fmt.Sprintf("%v%d", op.CUDAFuncName(), int(a.Dtype().Size())*8)
if !extern.HasFunc(name) {
if prealloc != nil {
return op.UsePreallocDo(prealloc, inputs...)
Expand Down
Loading

0 comments on commit f9a1a2d

Please sign in to comment.