Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
julia> data, blocks = load(datarecipes()["ecg5000"]);
julia> inblock = blocks[1];
julia> backbone = FastTimeSeries.Models.StackedLSTM(inblock.nfeatures, 16, 10, 2);
julia> xs = rand(Float32, inblock.nfeatures, 32, inblock.obslength);
julia> out = backbone(xs);
julia> size(out)
(16, 32, 140)
julia> Flux.outputsize(backbone, (inblock.nfeatures, 32, inblock.obslength); padbatch = false);
┌ Error: layer Recur(LSTMCell(1 => 10)), index 1 in Chain, gave an error with input of size (1, 32, 140)
└ @ Flux ~/.julia/packages/Flux/js6mP/src/outputsize.jl:107
ERROR: MethodError: no method matching (::Flux.LSTMCell{Matrix{Float32}, Vector{Float32}, Tuple{Matrix{Float32}, Matrix{Float32}}})(::Tuple{Matrix{Float32}, Matrix{Float32}}, ::SubArray{Flux.NilNumber.Nil, 2, Array{Flux.NilNumber.Nil, 3}, Tuple{Base.Slice{Base.OneTo{Int64}}, Base.Slice{Base.OneTo{Int64}}, Int64}, true})
Closest candidates are:
(::Flux.LSTMCell{A, V, <:Tuple{AbstractMatrix{T}, AbstractMatrix{T}}})(::Any, ::Union{AbstractVector{T}, AbstractMatrix{T}, Flux.OneHotArray}) where {A, V, T} at ~/.julia/packages/Flux/js6mP/src/layers/recurrent.jl:231
Stacktrace:
[1] (::Flux.Recur{Flux.LSTMCell{Matrix{Float32}, Vector{Float32}, Tuple{Matrix{Float32}, Matrix{Float32}}}, Tuple{Matrix{Float32}, Matrix{Float32}}})(x::SubArray{Flux.NilNumber.Nil, 2, Array{Flux.NilNumber.Nil, 3}, Tuple{Base.Slice{Base.OneTo{Int64}}, Base.Slice{Base.OneTo{Int64}}, Int64}, true})
@ Flux ~/.julia/packages/Flux/js6mP/src/layers/recurrent.jl:95
[2] (::Flux.var"#283#284"{Flux.Recur{Flux.LSTMCell{Matrix{Float32}, Vector{Float32}, Tuple{Matrix{Float32}, Matrix{Float32}}}, Tuple{Matrix{Float32}, Matrix{Float32}}}})(x_t::SubArray{Flux.NilNumber.Nil, 2, Array{Flux.NilNumber.Nil, 3}, Tuple{Base.Slice{Base.OneTo{Int64}}, Base.Slice{Base.OneTo{Int64}}, Int64}, true})
@ Flux ./none:0
[3] iterate
@ ./generator.jl:47 [inlined]
[4] collect(itr::Base.Generator{Base.Generator{Base.OneTo{Int64}, Flux.var"#273#275"{Array{Flux.NilNumber.Nil, 3}, Tuple{Colon, Colon}}}, Flux.var"#283#284"{Flux.Recur{Flux.LSTMCell{Matrix{Float32}, Vector{Float32}, Tuple{Matrix{Float32}, Matrix{Float32}}}, Tuple{Matrix{Float32}, Matrix{Float32}}}}})
@ Base ./array.jl:724
[5] (::Flux.Recur{Flux.LSTMCell{Matrix{Float32}, Vector{Float32}, Tuple{Matrix{Float32}, Matrix{Float32}}}, Tuple{Matrix{Float32}, Matrix{Float32}}})(x::Array{Flux.NilNumber.Nil, 3})
@ Flux ~/.julia/packages/Flux/js6mP/src/layers/recurrent.jl:120
[6] outputsize(m::Chain{Tuple{Flux.Recur{Flux.LSTMCell{Matrix{Float32}, Vector{Float32}, Tuple{Matrix{Float32}, Matrix{Float32}}}, Tuple{Matrix{Float32}, Matrix{Float32}}}, Flux.Recur{Flux.LSTMCell{Matrix{Float32}, Vector{Float32}, Tuple{Matrix{Float32}, Matrix{Float32}}}, Tuple{Matrix{Float32}, Matrix{Float32}}}}}, inputsizes::Tuple{Int64, Int64, Int64}; padbatch::Bool)
@ Flux ~/.julia/packages/Flux/js6mP/src/outputsize.jl:104
[7] top-level scope
@ REPL[12]:1
[8] top-level scope
@ ~/.julia/packages/CUDA/tTK8Y/src/initialization.jl:52
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement