Version: from Julia v0.4 onwards (I use 0.5.0-dev+433 (2015-09-29 15:39 UTC))
Reference: Jake Bolewski: Staged programming in Julia
Problem: After watching Jake Bolewski's talk on staged programming and its StaticVec example,
I did not catch the idea behind the example with the length
function.
julia> type StaticVec{T,N}
vals::Vector{T}
end
julia> StaticVec(T,vals...) = StaticVec{T,length(vals)}([vals...])
StaticVec{T,N}
julia> v= StaticVec(Float64,1,2,3)
StaticVec{Float64,3}([1.0,2.0,3.0])
The non-staged `length` version:
julia> function Base.length{T,N}(v::StaticVec{T,N})
N
end
length (generic function with 58 methods)
julia> code_llvm(length, (StaticVec{Float64,3},))
define i64 @julia_length_21889(%jl_value_t*) {
top:
ret i64 3
}
and the staged `length` version
julia> @generated function Base.length{T,N}(v::StaticVec{T,N})
:(N)
end
length (generic function with 58 methods)
julia> code_llvm(length, (StaticVec{Float64,3},))
define i64 @julia_length_21888(%jl_value_t*) {
top:
ret i64 3
}
give the same llvm code.
I suppose I understand the idea behind staged programming, but in this particular example I don't understand the intention of the speaker. Could anyone explain it to me?
That example was probably not the best choice since, as you point out, it doesn't require generated functions at all. Jiahao Chen recently wrote a blog post with an excellent example of using a generated function to do efficient data smoothing. Here's a similar bit of code to his that improves efficiency even further by peeling off M loop iterations at the front and back, thereby avoiding branches in the main loop body:
# Singleton parameter-carrier type: M is the half-width of the smoothing window
# (2M+1 taps) and N is the degree of the fitted polynomial. It has no fields —
# it exists only to lift M and N into the type domain so the @generated smooth!
# can specialize on them. (`immutable` is pre-0.6 Julia syntax for `struct`.)
immutable SavitzkyGolayFilter{M,N} end
# Wrap an index that fell below the valid range [1, len] around to the top end
# (periodic boundary). Uses `ifelse`, which evaluates both arms and selects,
# avoiding a data-dependent branch in the generated boundary loops.
wrapL(idx, len) = ifelse(1 ≤ idx, idx, idx + len)
# Wrap an index that ran past the valid range [1, len] around to the bottom end
# (periodic boundary); branch-free counterpart of wrapL via `ifelse`.
wrapR(idx, len) = ifelse(idx ≤ len, idx, idx - len)
# Generated-function core of the Savitzky–Golay smoother.
# Because M and N are type parameters, the coefficient solve below and the
# fully unrolled loop bodies are built once at compile time per {M,N} pair;
# only the final quoted block executes at call time.
# (`function f{M,N}(...)` is pre-0.6 syntax for `f(...) where {M,N}`.)
@generated function smooth!{M,N}(
    ::Type{SavitzkyGolayFilter{M,N}},
    data::AbstractVector,
    smoothed::AbstractVector,
)
    # compute filter coefficients from the Jacobian
    # Vandermonde-style design matrix for a degree-N polynomial fit
    # over the 2M+1 window points centered on each sample.
    J = Float64[(i-M-1)^(j-1) for i = 1:2M+1, j = 1:N+1]
    e₁ = [1; zeros(N)]  # selects the fitted value itself (0th derivative)
    C = J' \ e₁         # least-squares filter coefficients, length 2M+1
    # generate code to evaluate filter on data matrix
    # Three loops: `pre` and `post` cover the first/last M samples and wrap
    # out-of-range indices; `main` is the branch-free interior loop.
    pre = :(for i = 1:$M end)
    main = :(for i = $(M+1):n-$M end)
    post = :(for i = n-$(M-1):n end)
    for loop in (pre, main, post)
        # args[2] of a `for` Expr is its (initially empty) body block.
        body = loop.args[2].args
        # center tap
        push!(body, :(x = $(C[M+1]) * data[i]))
        # left taps: only the `pre` loop can see i-j < 1, so only it wraps
        for j = reverse(1:M)
            idx = loop !== pre ? :(i-$j) : :(wrapL(i-$j,n))
            push!(body, :(x += $(C[M+1-j]) * data[$idx]))
        end
        # right taps: only the `post` loop can see i+j > n, so only it wraps
        for j = 1:M
            idx = loop !== post ? :(i+$j) : :(wrapR(i+$j,n))
            push!(body, :(x += $(C[M+1+j]) * data[$idx]))
        end
        push!(body, :(smoothed[i] = x))
    end
    # The returned quote is the code that runs on every call: a length check
    # followed by the three specialized loops with bounds checks elided.
    quote
        n = length(data)
        n == length(smoothed) || throw(DimensionMismatch())
        @inbounds $pre; @inbounds $main; @inbounds $post
        return smoothed
    end
end
# Out-of-place convenience wrapper: allocate a result vector whose element type
# is the float promotion of T (via typeof(1.0*one(T))), then delegate to the
# in-place smooth! kernel. (Pre-0.6 parametric-method syntax, matching the rest
# of this answer's code.)
function smooth{S<:SavitzkyGolayFilter,T}(::Type{S}, data::AbstractVector{T})
    output = Vector{typeof(1.0*one(T))}(length(data))
    return smooth!(S, data, output)
end
The code generated for `smooth(SavitzkyGolayFilter{3,4}, rand(1000))`, for example, is the following:
n = length(data)
n == length(smoothed) || throw(DimensionMismatch())
@inbounds for i = 1:3
x = 0.5670995670995674 * data[i]
x += 0.02164502164502159 * data[wrapL(i - 3, n)]
x += -0.1298701298701297 * data[wrapL(i - 2, n)]
x += 0.32467532467532445 * data[wrapL(i - 1, n)]
x += 0.32467532467532473 * data[i + 1]
x += -0.12987012987013022 * data[i + 2]
x += 0.021645021645021724 * data[i + 3]
smoothed[i] = x
end
@inbounds for i = 4:n-3
x = 0.5670995670995674 * data[i]
x += 0.02164502164502159 * data[i - 3]
x += -0.1298701298701297 * data[i - 2]
x += 0.32467532467532445 * data[i - 1]
x += 0.32467532467532473 * data[i + 1]
x += -0.12987012987013022 * data[i + 2]
x += 0.021645021645021724 * data[i + 3]
smoothed[i] = x
end
@inbounds for i = n-2:n
x = 0.5670995670995674 * data[i]
x += 0.02164502164502159 * data[i - 3]
x += -0.1298701298701297 * data[i - 2]
x += 0.32467532467532445 * data[i - 1]
x += 0.32467532467532473 * data[wrapR(i + 1, n)]
x += -0.12987012987013022 * data[wrapR(i + 2, n)]
x += 0.021645021645021724 * data[wrapR(i + 3, n)]
smoothed[i] = x
end
return smoothed
As you might imagine, this generates very efficient machine code. I hope that clears up the concept of generated functions somewhat.
If you found this helpful, you can donate to us via PayPal or buy me a coffee so we can maintain and grow. Thank you!
Donate to us with: