# differentiation.jl
using ForwardDiff, StaticArrays, BenchmarkTools, LinearAlgebra   # LinearAlgebra provides norm
test_f(x) = sum(sin, x) + prod(tan, x) * sum(sqrt, x);
test_f2(x) = norm(inv(x))   # scalar-valued: differentiate with gradient
test_f3(x) = inv(x)         # array-valued: differentiate with jacobian
x = rand(5)
x2 = randn(3,3)
test_g = x -> ForwardDiff.gradient(test_f, x);
test_g2 = x -> ForwardDiff.gradient(test_f2, x);
test_g3 = x -> ForwardDiff.jacobian(test_f3, x);
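# A quick sanity check (a sketch, not part of the original script): compare the
# ForwardDiff gradient against a simple central finite difference.  The helper
# `fd_gradient` is hypothetical, introduced here only for illustration.
function fd_gradient(f, x; h = 1e-6)
    g = similar(x)
    for i in eachindex(x)
        xp = copy(x); xm = copy(x)
        xp[i] += h
        xm[i] -= h
        g[i] = (f(xp) - f(xm)) / (2h)
    end
    return g
end
# test_g(x) and fd_gradient(test_f, x) should agree to roughly sqrt(eps()).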
#=
@show test_g(x)
@btime test_g($x)
@btime test_f($x)
=#
test_f1(x) = sum(sin, x)
# gradient assumes f maps to real numbers
# jacobian assumes output of f is an array
test_g1 = x -> ForwardDiff.gradient(test_f1, x);
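# Illustration of the distinction above (a sketch, not part of the original
# script): gradient needs a scalar-valued function, jacobian an array-valued one.
ForwardDiff.gradient(v -> sum(abs2, v), [1.0, 2.0])   # returns [2.0, 4.0]
ForwardDiff.jacobian(v -> v .* v, [1.0, 2.0])          # returns [2.0 0.0; 0.0 4.0]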
x = 3.0
X = [x 1.0; 0 x]
sX = @SArray [x 1.0; 0 x]
test_f1(x)
test_g1([x])
@btime test_f1($x)
@btime test_f1($X)
@btime test_f1($sX)
tmp = [x]
@btime test_g1($tmp)
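# The same comparison for the gradient (a sketch, not in the original script):
# with a static vector input ForwardDiff can stay allocation-free.
# `stmp` is a name introduced here for illustration.
stmp = @SVector [3.0]
@btime test_g1($stmp)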
###
import Base: +, -, *, /, convert, promote_rule
# A minimal dual number x + dx*ε with ε^2 = 0: `x` holds the value and
# `dx` the derivative that the overloaded arithmetic propagates.
struct DualNumber{T} <: Number
    x::T     # primal value
    dx::T    # derivative (perturbation) component
end
function *(x::DualNumber{T}, y::DualNumber{T}) where T
    return DualNumber{T}(x.x * y.x, x.dx * y.x + x.x * y.dx)   # product rule
end
function +(x::DualNumber{T}, y::DualNumber{T}) where T
    return DualNumber{T}(x.x + y.x, x.dx + y.dx)
end
function -(x::DualNumber{T}, y::DualNumber{T}) where T
    return DualNumber{T}(x.x - y.x, x.dx - y.dx)
end
function /(x::DualNumber{T}, y::DualNumber{T}) where T
    # quotient rule: (x/y)' = (x'y - xy') / y^2
    return DualNumber{T}(x.x / y.x, (x.dx * y.x - x.x * y.dx) / (y.x * y.x))
end
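# Sketch (not in the original script): unary elementary functions extend the
# same way, with the chain rule applied to the dx field.
Base.sin(x::DualNumber{T}) where T = DualNumber{T}(sin(x.x), cos(x.x) * x.dx)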
a = DualNumber(3.0, 1.0)
b = DualNumber(1.0, 0.0)
# Let DualNumber mix with plain Float64: a constant is promoted with dx = 0,
# so mixed expressions dispatch to the methods defined above.
convert(::Type{DualNumber{Float64}}, x::Float64) = DualNumber{Float64}(x, zero(x))
promote_rule(::Type{DualNumber{Float64}}, ::Type{Float64}) = DualNumber{Float64}
DualNumber(x) = DualNumber(x, zero(typeof(x)))   # constants carry a zero derivative
println(b / a)
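# With convert and promote_rule in place, DualNumber mixes with plain floats
# (a sketch, not part of the original script): 1.0 is promoted to
# DualNumber(1.0, 0.0) before the overloaded + applies.
println(a + 1.0)   # DualNumber{Float64}(4.0, 1.0)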
c = [a a; a a]
tmp = @SVector [1, 1, 1, 1]   # @SVector expects a comma-separated vector literal
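# Sketch (not in the original script): the hand-rolled DualNumber reproduces
# forward-mode AD.  Seeding dx = 1.0 propagates the derivative, so for
# g(t) = t*t + t the dx field of the result equals g'(3) = 2*3 + 1 = 7.
g(t) = t * t + t
println(g(DualNumber(3.0, 1.0)))           # DualNumber{Float64}(12.0, 7.0)
println(ForwardDiff.derivative(g, 3.0))    # 7.0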