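For context: the paste below is the raw output of dump(learner) on a FastAI.jl / FluxTraining.jl Learner wrapping an xresnet-style image classifier (10 output classes, batch size 32, Adam optimizer, logitcrossentropy loss). A minimal sketch of how such a learner is typically constructed is given here first; the dataset recipe, backbone choice, and exact keyword names are assumptions for illustration and are not read from the dump itself.

    # Minimal sketch, following the FastAI.jl quickstart pattern.
    # Assumptions: dataset recipe, epoch count, and keyword names are illustrative only.
    using FastAI, FastVision, Flux

    data, blocks = load(datarecipes()["imagenette2-160"])   # any 10-class image dataset
    task = ImageClassificationSingle(blocks)

    learner = tasklearner(task, data;
        lossfn    = Flux.Losses.logitcrossentropy,   # matches the lossfn field in the dump
        optimizer = Flux.Optimise.Adam(),            # matches the optimizer field in the dump
        batchsize = 32)                              # matches the DataLoader batchsize in the dump

    fitonecycle!(learner, 5)   # one-cycle training reschedules Adam's eta, as seen in the dumped state
    dump(learner)              # produces the tree below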
- dump(learner)
- Learner
- model: Flux.Chain{Tuple{Flux.Chain{Tuple{Flux.Chain{Tuple{Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, Flux.BatchNorm{typeof(NNlib.relu), Vector{Float32}, Float32, Vector{Float32}}}}, Flux.Chain{Tuple{Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, Flux.BatchNorm{typeof(NNlib.relu), Vector{Float32}, Float32, Vector{Float32}}}}, Flux.Chain{Tuple{Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, Flux.BatchNorm{typeof(NNlib.relu), Vector{Float32}, Float32, Vector{Float32}}}}, Flux.MaxPool{2, 4}, Flux.Chain{Tuple{FastVision.Models.ResBlock, FastVision.Models.ResBlock}}, Flux.Chain{Tuple{FastVision.Models.ResBlock, FastVision.Models.ResBlock}}, Flux.Chain{Tuple{FastVision.Models.ResBlock, FastVision.Models.ResBlock}}, Flux.Chain{Tuple{FastVision.Models.ResBlock, FastVision.Models.ResBlock}}}}, Flux.Chain{Tuple{Flux.Parallel{typeof(vcat), Tuple{Flux.AdaptiveMeanPool{4, 2}, Flux.AdaptiveMaxPool{4, 2}}}, typeof(MLUtils.flatten), Flux.Chain{Tuple{Flux.BatchNorm{typeof(identity), Vector{Float32}, Float32, Vector{Float32}}, typeof(identity), Flux.Dense{typeof(NNlib.relu), Matrix{Float32}, Bool}}}, Flux.Chain{Tuple{Flux.BatchNorm{typeof(identity), Vector{Float32}, Float32, Vector{Float32}}, typeof(identity), Flux.Dense{typeof(identity), Matrix{Float32}, Bool}}}}}}}
- layers: Tuple{Flux.Chain{Tuple{Flux.Chain{Tuple{Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, Flux.BatchNorm{typeof(NNlib.relu), Vector{Float32}, Float32, Vector{Float32}}}}, Flux.Chain{Tuple{Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, Flux.BatchNorm{typeof(NNlib.relu), Vector{Float32}, Float32, Vector{Float32}}}}, Flux.Chain{Tuple{Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, Flux.BatchNorm{typeof(NNlib.relu), Vector{Float32}, Float32, Vector{Float32}}}}, Flux.MaxPool{2, 4}, Flux.Chain{Tuple{FastVision.Models.ResBlock, FastVision.Models.ResBlock}}, Flux.Chain{Tuple{FastVision.Models.ResBlock, FastVision.Models.ResBlock}}, Flux.Chain{Tuple{FastVision.Models.ResBlock, FastVision.Models.ResBlock}}, Flux.Chain{Tuple{FastVision.Models.ResBlock, FastVision.Models.ResBlock}}}}, Flux.Chain{Tuple{Flux.Parallel{typeof(vcat), Tuple{Flux.AdaptiveMeanPool{4, 2}, Flux.AdaptiveMaxPool{4, 2}}}, typeof(MLUtils.flatten), Flux.Chain{Tuple{Flux.BatchNorm{typeof(identity), Vector{Float32}, Float32, Vector{Float32}}, typeof(identity), Flux.Dense{typeof(NNlib.relu), Matrix{Float32}, Bool}}}, Flux.Chain{Tuple{Flux.BatchNorm{typeof(identity), Vector{Float32}, Float32, Vector{Float32}}, typeof(identity), Flux.Dense{typeof(identity), Matrix{Float32}, Bool}}}}}}
- 1: Flux.Chain{Tuple{Flux.Chain{Tuple{Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, Flux.BatchNorm{typeof(NNlib.relu), Vector{Float32}, Float32, Vector{Float32}}}}, Flux.Chain{Tuple{Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, Flux.BatchNorm{typeof(NNlib.relu), Vector{Float32}, Float32, Vector{Float32}}}}, Flux.Chain{Tuple{Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, Flux.BatchNorm{typeof(NNlib.relu), Vector{Float32}, Float32, Vector{Float32}}}}, Flux.MaxPool{2, 4}, Flux.Chain{Tuple{FastVision.Models.ResBlock, FastVision.Models.ResBlock}}, Flux.Chain{Tuple{FastVision.Models.ResBlock, FastVision.Models.ResBlock}}, Flux.Chain{Tuple{FastVision.Models.ResBlock, FastVision.Models.ResBlock}}, Flux.Chain{Tuple{FastVision.Models.ResBlock, FastVision.Models.ResBlock}}}}
- layers: Tuple{Flux.Chain{Tuple{Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, Flux.BatchNorm{typeof(NNlib.relu), Vector{Float32}, Float32, Vector{Float32}}}}, Flux.Chain{Tuple{Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, Flux.BatchNorm{typeof(NNlib.relu), Vector{Float32}, Float32, Vector{Float32}}}}, Flux.Chain{Tuple{Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, Flux.BatchNorm{typeof(NNlib.relu), Vector{Float32}, Float32, Vector{Float32}}}}, Flux.MaxPool{2, 4}, Flux.Chain{Tuple{FastVision.Models.ResBlock, FastVision.Models.ResBlock}}, Flux.Chain{Tuple{FastVision.Models.ResBlock, FastVision.Models.ResBlock}}, Flux.Chain{Tuple{FastVision.Models.ResBlock, FastVision.Models.ResBlock}}, Flux.Chain{Tuple{FastVision.Models.ResBlock, FastVision.Models.ResBlock}}}
- 1: Flux.Chain{Tuple{Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, Flux.BatchNorm{typeof(NNlib.relu), Vector{Float32}, Float32, Vector{Float32}}}}
- layers: Tuple{Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, Flux.BatchNorm{typeof(NNlib.relu), Vector{Float32}, Float32, Vector{Float32}}}
- 1: Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}
- σ: identity (function of type typeof(identity))
- weight: Array{Float32}((3, 3, 3, 32)) [-0.6228971 -0.45270604 0.18772596; 0.07976723 0.012726379 -0.35602343; -0.5634569 -0.3862746 0.028778944;;; -0.121167526 0.12853527 0.073828205; 0.07397064 0.30849433 -0.36145356; -0.14903721 -0.36155963 -0.1412598;;; 0.56383353 0.90163064 0.5372785; 0.41688278 0.57620484 0.20665929; 0.68597907 0.7797802 0.61220956;;;; 0.27145606 0.47053307 0.6643835; 0.6616727 0.70229185 0.3597029; 0.83581984 -0.07549205 0.07092042;;; -1.3901415 -1.0986211 -1.8818237; -1.0358185 -0.9884816 -1.2193606; -0.97906834 -1.1071833 -1.420982;;; 0.36389422 0.599576 0.2023731; 0.995327 0.15926023 0.35122323; 0.66623634 -0.01476687 -0.35847715;;;; 0.849729 -0.14286755 0.18935755; 0.7032158 0.41288504 0.549591; 0.5856767 0.20481382 0.4430563;;; -0.86140007 -1.637157 -1.3969746; -1.0799056 -1.5378343 -1.843872; -0.7428982 -1.1910206 -1.3792411;;; 0.12516966 -0.07747632 -0.30945426; 0.44876707 -0.67609656 -0.13640974; 0.32248083 -0.25018722 -0.34930056;;;; … ;;;; 1.2356839 0.7943954 0.64405715; 1.1055567 1.048104 0.670613; 1.2246267 0.87034446 0.82917964;;; 0.00840347 -1.2916182 -0.52266234; -0.8927084 -0.24726932 -0.45814657; -0.04297699 -0.60880625 -0.75221705;;; 0.5864988 0.4322969 0.0070129903; 0.56319207 0.0770953 0.19533734; 0.34735975 0.20152913 0.2598005;;;; -0.89531213 -1.991288 -0.90119547; -0.1784513 -0.7721306 -0.18916023; -0.8775938 -0.8828231 -0.25546074;;; -2.0066998 -1.9587256 -1.9540242; -0.941955 -1.2707055 -1.2287105; -0.7233857 -1.0122621 -0.78689843;;; 1.6253178 1.4107839 1.5320448; 2.0367432 1.8668071 1.9505911; 2.5239944 2.0231075 2.2849576;;;; -0.66736937 -0.41646266 0.18140158; -0.37302488 -0.120915316 0.3530479; -0.53939545 -0.042539727 0.13244775;;; -0.87201303 -0.81783426 -0.14501327; -0.91739565 -1.2812762 -0.04524824; -1.5164549 -1.0456936 0.1279453;;; -0.24478842 0.36559737 0.56679714; -0.9469862 -0.33409342 0.506591; -0.46515203 -0.11704081 0.16974343]
- bias: Array{Float32}((32,)) Float32[0.45870218, 1.6750594, 0.26466796, 0.34264174, -0.54983026, 0.39555025, -0.08493881, -0.42704007, 0.66570723, -0.2032263 … -0.028375795, -0.37466305, 0.14592756, 1.7479923, 0.4381849, -0.3785748, -0.19711815, -0.51015633, 1.7521653, 0.47264507]
- stride: Tuple{Int64, Int64}
- pad: NTuple{4, Int64}
- dilation: Tuple{Int64, Int64}
- groups: Int64 1
- 2: Flux.BatchNorm{typeof(NNlib.relu), Vector{Float32}, Float32, Vector{Float32}}
- λ: relu (function of type typeof(NNlib.relu))
- β: Array{Float32}((32,)) Float32[0.6189856, 0.029828021, 0.60322595, -0.7778035, -1.9884398, 0.79331493, 0.4854119, 1.0648932, 0.82768005, -0.65227926 … 1.1576712, -1.3264399, 2.338, -2.672842, 0.36657676, -1.4750228, -0.9620088, 2.7165895, 2.058471, -1.5207441]
- γ: Array{Float32}((32,)) Float32[1.7110896, 1.9869008, 2.3066208, 0.3357683, 0.70436347, 0.90792996, 1.3186344, 2.7534206, -0.21001267, 0.19984382 … 1.3013526, 0.41716906, 1.9749262, -0.054377597, 2.2250168, 0.7057137, 0.34496078, 2.3395224, 3.6468847, 1.8438438]
- μ: Array{Float32}((32,)) Float32[0.991602, 1.5884961, -0.054575935, 0.21555018, -0.32266974, 0.8553943, 0.21458831, 0.6473691, 0.4388265, -0.70469844 … 0.21311998, -0.29928192, -0.1324045, 2.5106764, 1.079106, -0.09887982, -0.47889352, -1.0728786, 3.5874312, 0.62515175]
- σ²: Array{Float32}((32,)) Float32[19.061691, 32.812634, 111.763535, 16.209621, 72.477394, 43.659172, 52.939217, 60.196945, 157.33815, 22.131348 … 3.2249393, 41.2981, 16.281961, 238.65475, 19.449142, 78.399124, 18.105896, 53.66066, 105.76998, 95.82642]
- ϵ: Float32 1.0f-5
- momentum: Float32 0.1f0
- affine: Bool true
- track_stats: Bool true
- active: Nothing nothing
- chs: Int64 32
- 2: Flux.Chain{Tuple{Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, Flux.BatchNorm{typeof(NNlib.relu), Vector{Float32}, Float32, Vector{Float32}}}}
- layers: Tuple{Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, Flux.BatchNorm{typeof(NNlib.relu), Vector{Float32}, Float32, Vector{Float32}}}
- 1: Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}
- σ: identity (function of type typeof(identity))
- weight: Array{Float32}((3, 3, 32, 64)) [0.12309115 0.096513756 0.22513509; 0.102684654 0.074072935 0.055383626; 0.1419728 0.029549204 0.22451778;;; 0.09738428 0.11244798 -0.016608527; 0.18411085 -0.0413606 0.076171726; 0.13217317 0.34393683 0.12943034;;; 0.012793227 0.20953394 0.19164681; 0.15106378 0.25222912 0.0113491565; 0.042089425 0.18754488 0.17233725;;; … ;;; 0.16311681 0.082002535 0.0365135; 0.017853715 0.049579192 0.09798461; 0.21880496 0.0176832 -0.07284778;;; 0.06641385 -0.13200743 -0.0928225; 0.012751172 -0.114320695 -0.1746536; 0.0315527 -0.043128647 0.05889693;;; 0.31474337 0.28808197 0.24967818; 0.3797815 0.3612135 0.32099423; 0.18741438 0.30687603 0.39741877;;;; 0.15091914 0.23177159 0.16057728; 0.2749026 0.3744271 0.24830686; 0.13791783 0.25995272 0.21711819;;; 0.14482492 0.11632209 0.21073857; 0.04696855 0.123586655 0.2209793; 0.13188928 0.014208372 0.07180148;;; -0.030749204 -0.014549437 -0.09288043; -0.13545504 -0.07408477 0.021056473; -0.08925637 -0.0946378 -0.10390064;;; … ;;; 0.22210866 0.30208555 0.22016405; 0.45997807 0.39394608 0.18111345; 0.39299753 0.30907866 0.347076;;; 0.04480817 0.092122786 -0.13604993; -0.008185623 -0.0717955 -0.048076823; 0.09130952 -0.053567354 0.018388905;;; -0.18568736 -0.12400962 -0.104919; -0.13006066 -0.15739155 0.11396596; 0.0008254793 -0.009531839 -0.078244075;;;; 0.10871276 0.12089125 -0.024060115; 0.10367907 -0.029651182 0.013086661; 0.13498285 -0.053227983 -0.1491332;;; -0.29136866 -0.13846414 -0.043760788; -0.17255469 -0.138035 -0.1159682; -0.06765659 -0.112094 -0.16525503;;; -0.4865254 -0.32772923 -0.13330008; -0.42590725 -0.33713737 -0.3230917; -0.36711195 -0.04653347 -0.08935336;;; … ;;; 0.5899668 0.5368703 0.5256972; 0.4559471 0.31147859 0.40561706; 0.269548 0.4050247 0.25834495;;; -1.1069013 -1.4215429 -1.2190584; -1.3303885 -1.7814963 -1.4701008; -1.3221347 -1.507736 -1.5136461;;; 0.11928751 0.080059595 0.09376048; -0.02374018 -0.04088516 0.18081057; 0.07040813 -0.14814371 0.1110147;;;; … ;;;; 0.26415822 0.2650189 0.31621864; 0.23022453 0.23281778 0.11937006; 0.16332737 0.1561928 0.27536562;;; -0.05294837 -0.06612764 -0.08159666; -0.09360858 0.029901061 -0.10968407; 0.02560177 -0.013727121 -0.07322843;;; 0.16485065 0.067685254 0.16604166; 0.044716667 -0.134529 0.004497062; 0.0820052 -0.016214283 0.09519673;;; … ;;; 0.3950531 0.17958419 0.15493982; 0.30908132 0.4419503 0.4124087; 0.40274444 0.32235765 0.0830516;;; 0.1599784 0.062983766 0.21791695; 0.25491965 0.21932305 0.27069694; 0.21038881 0.17972504 0.14194594;;; 0.1515782 0.10669196 0.28413683; 0.039226666 0.25106362 0.25085387; 0.167038 0.113748886 0.22491865;;;; 0.08003582 0.05422766 0.023555432; 0.024777832 0.12133836 0.08701734; 0.09341219 -0.024967626 0.032816842;;; 0.039026495 0.11083005 0.0037922477; 0.01260674 -0.065693475 -0.08848513; 0.023691244 -0.027520504 -0.29384202;;; 0.05540608 -0.01739945 -0.14498779; 0.03356454 -0.04964022 -0.09045817; -0.16015534 0.029092075 -0.11088025;;; … ;;; 0.11590049 0.2853804 0.15860167; 0.23457752 0.23067166 0.030949207; -0.016515754 0.09254872 -0.052669477;;; -0.03788733 0.009690789 -0.08045262; 0.28263438 0.07460654 -0.11884126; -0.038262993 -0.018022506 0.029937595;;; -0.2030993 0.049562406 -0.027646875; -0.23358913 -0.22652747 -0.22187945; -0.105145045 -0.27482387 0.010262072;;;; -0.24450046 -0.27822027 -0.37343848; -0.35807842 -0.35130763 -0.5030494; -0.567563 -0.43859652 -0.6052176;;; 0.29692018 0.26909828 0.2483256; 0.23884127 0.28830713 0.29682332; 0.27306715 0.20698616 0.27244547;;; 0.31171775 0.3161396 0.7493256; 
0.33578008 0.49293095 0.80023074; 0.39339986 0.45134035 0.7438353;;; … ;;; -0.11635591 -0.14630328 -0.051253602; -0.23000103 -0.31247184 -0.27829295; -0.2384953 -0.23640914 -0.33179998;;; -0.06279065 -0.035671964 -0.06825111; 0.010673962 -0.1064326 -0.16751035; 0.14720385 -0.073051326 -0.09534922;;; 0.40973714 0.4715226 0.62366295; 0.42931515 0.50844806 0.4517463; 0.32270262 0.35767114 0.5660996]
- bias: Array{Float32}((64,)) Float32[0.1938347, -0.32085112, 0.09053659, 0.24596801, 0.5689236, -0.10670686, 0.15361135, 0.16769059, 0.46394488, -0.025959313 … -0.2984479, 0.33377334, -0.024496576, 0.1340538, -0.13564157, -0.15246452, -0.1074989, 0.12233376, 0.1241183, -0.39366192]
- stride: Tuple{Int64, Int64}
- pad: NTuple{4, Int64}
- dilation: Tuple{Int64, Int64}
- groups: Int64 1
- 2: Flux.BatchNorm{typeof(NNlib.relu), Vector{Float32}, Float32, Vector{Float32}}
- λ: relu (function of type typeof(NNlib.relu))
- β: Array{Float32}((64,)) Float32[-0.1151854, -1.0068103, 2.706683, 0.874459, 0.32899022, -0.74232876, -0.6892494, -0.31747308, 0.25284213, -0.092041336 … -1.0325212, -1.6787742, -0.98535264, -0.60890174, -0.59901804, -0.7167541, 2.809269, 0.05088432, -1.2189043, 1.376303]
- γ: Array{Float32}((64,)) Float32[-0.04811601, 0.1269143, 1.5105467, 1.3437995, 1.2671719, 0.42845696, 0.42192084, 0.8462748, 1.6616421, 1.2951772 … -0.6121009, 0.88744086, 0.50386214, 1.2333946, 0.34773254, -0.033606146, -1.1838198, 0.9853191, -0.74433804, 0.41932482]
- μ: Array{Float32}((64,)) Float32[3.5041356, 42.81533, -20.473255, -155.31325, 31.18697, -19.358017, -31.803423, 39.594006, 54.80977, -28.355078 … 5.702737, -85.354294, 31.698328, -11.545811, -26.227348, 24.89641, -39.0093, 44.248596, 18.139753, -38.921467]
- σ²: Array{Float32}((64,)) Float32[136.43045, 678.8815, 3299.9104, 10426.616, 623.0838, 80.376274, 416.00244, 1881.7112, 2043.1953, 1211.0273 … 25.143812, 2745.038, 666.9866, 1556.7311, 216.49112, 265.74197, 3640.7036, 538.20294, 158.57349, 2065.9873]
- ϵ: Float32 1.0f-5
- momentum: Float32 0.1f0
- affine: Bool true
- track_stats: Bool true
- active: Nothing nothing
- chs: Int64 64
- 3: Flux.Chain{Tuple{Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, Flux.BatchNorm{typeof(NNlib.relu), Vector{Float32}, Float32, Vector{Float32}}}}
- layers: Tuple{Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, Flux.BatchNorm{typeof(NNlib.relu), Vector{Float32}, Float32, Vector{Float32}}}
- 1: Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}
- σ: identity (function of type typeof(identity))
- weight: Array{Float32}((3, 3, 64, 64)) [0.021408278 0.09626925 0.058577184; 0.015385787 0.09333608 0.13932662; 0.093343094 0.05219794 0.018929314;;; 0.13984261 0.09906983 0.116197385; 0.06955157 0.22240901 0.10894292; 0.20597218 0.17121434 0.06381315;;; -0.06578595 0.100769274 -0.056382287; -0.048510414 -0.04751092 -0.0708728; -0.04537043 -0.010264958 -0.08156217;;; … ;;; 0.13106172 0.05615019 0.12123497; 0.19582413 0.078362115 0.15956672; 0.007877325 -0.010508799 0.173022;;; 0.18646191 -0.0018993801 0.076029785; 0.03324543 0.0050497567 0.106029674; 0.13131358 0.15472682 0.04793886;;; 0.12746279 -0.04625614 0.02799181; -0.02501562 0.09293183 -0.018291337; -0.117341936 -0.06453718 0.07313726;;;; -0.061221812 -0.14866073 -0.13475883; -0.1889053 -0.35346082 -0.35804743; -0.08390366 -0.080716886 -0.04652826;;; 0.16117711 -0.06971354 -0.01795192; 0.010370963 0.07031711 -0.10671515; -0.018279217 0.055047173 -0.014023235;;; -0.8849477 -0.96958274 -0.7811981; -1.1045893 -1.2775524 -1.462677; -0.9633677 -1.1339468 -1.3265654;;; … ;;; -0.12783322 -0.079431795 -0.17715885; -0.14912961 -0.08845018 -0.18793915; -0.1104218 -0.1784009 -0.104845874;;; -0.0693028 -0.05825376 -0.005119728; -0.004142537 -0.06451489 -0.13426092; -0.0849647 -0.12786824 0.071425386;;; -0.32265761 -0.3304531 -0.3631116; -0.43903854 -0.35061514 -0.3744506; -0.5624459 -0.5481527 -0.5356398;;;; 0.063857555 -0.119441524 0.05644473; -0.064062275 -0.09213272 -0.14706317; -0.031056568 -0.050510116 -0.09416387;;; 0.035890445 0.02065523 -0.024662232; -0.019501667 -0.013183073 0.032916524; 0.044469554 0.005594043 0.0706688;;; 0.47572297 0.4505415 0.57160664; 0.48458946 0.37424392 0.41046706; 0.51797163 0.4581149 0.39266947;;; … ;;; 0.16684942 0.13493949 0.22073847; 0.018094167 0.1428133 0.13419062; 0.18068533 0.018646773 0.046777066;;; -0.1079008 0.03766372 -0.061376143; -0.11403615 -0.074901074 0.020107476; -0.17595029 -0.20499504 -0.055016924;;; -0.10304576 -0.09377046 -0.19700077; -0.043499377 -0.24926242 -0.29693294; -0.28029332 -0.09649345 -0.31215248;;;; … ;;;; -0.11204116 -0.07452128 -0.1595126; -0.068765596 -0.11274329 0.07321997; -0.14172316 -0.10687019 0.020963762;;; -0.04595928 -0.044595193 -0.19325909; -0.090917476 -0.14099321 -0.20167316; -0.070944756 -0.16729575 -0.08609381;;; 0.12574477 0.26175934 0.16904452; 0.2528565 0.15254441 0.14271723; 0.25861895 0.060198136 0.10893785;;; … ;;; -0.22470571 -0.36528242 -0.23575504; -0.38898936 -0.33045858 -0.21009456; -0.17997186 -0.19112185 -0.31719467;;; 0.00871798 0.097212784 0.07812173; 0.00029964134 0.036332123 0.09724896; -0.116848744 0.019632872 0.030091036;;; 0.1106468 0.19684508 0.063737884; 0.1323278 0.11302606 0.19333649; 0.13589624 0.25087616 0.11109062;;;; -0.19701782 -0.07589722 -0.10713951; -0.14192745 -0.04713686 -0.11230834; -0.10344933 -0.10277593 -0.07075757;;; 0.02076839 0.085208006 -0.085762486; -0.03851071 -0.03304212 -0.04814304; -0.013058766 -0.14574681 -0.090587996;;; 0.17521429 0.1832934 0.17910649; 0.20534883 0.16268592 0.15066393; 0.09949161 0.11191688 0.111105725;;; … ;;; -0.11313035 -0.06807514 -0.25512412; -0.112225145 -0.2914793 -0.2956613; -0.18201524 -0.11786293 -0.19325563;;; 0.059416026 -0.029348575 0.06328604; -0.09558472 -0.023441255 -0.03743595; 0.07511751 -0.063774586 0.005887615;;; -0.010652886 -0.014022581 0.059463136; 0.03770864 -0.0070532258 -0.058275003; -0.055767298 -0.16001096 -0.079582974;;;; 0.08890691 0.012035961 0.2050351; 0.087945506 0.07297599 0.09759968; 0.06231816 -0.0016036306 -0.02517921;;; 0.026253466 0.025720064 0.1662497; 
0.028877307 0.17289703 0.07533222; 0.0065819602 -0.022230534 0.04954471;;; 0.3110161 0.36890006 0.3311997; 0.24884067 0.34959963 0.35623768; 0.30892506 0.26468563 0.48982427;;; … ;;; -0.020873662 -0.25318372 -0.113176554; -0.105274536 -0.090247795 -0.064127006; -0.09534847 -0.07962755 -0.06771076;;; -0.10403038 -0.06119253 -0.23658065; -0.07443181 -0.039023105 -0.11904001; -0.15275644 -0.09095555 -0.07346617;;; 0.14509644 0.18341033 0.14296968; 0.1000669 0.25742945 0.19007865; 0.17882751 0.21383631 0.26216435]
- bias: Array{Float32}((64,)) Float32[-0.073088065, -0.36743134, 0.05349605, -0.09768014, -0.010604041, -0.41519815, -0.067094535, -0.39662498, 0.2877811, -0.10296968 … 0.008398112, 0.44943282, -0.73957914, 0.09149756, -0.14514549, 0.04834562, -0.08533412, -0.014090195, -0.025582714, -0.081279024]
- stride: Tuple{Int64, Int64}
- pad: NTuple{4, Int64}
- dilation: Tuple{Int64, Int64}
- groups: Int64 1
- 2: Flux.BatchNorm{typeof(NNlib.relu), Vector{Float32}, Float32, Vector{Float32}}
- λ: relu (function of type typeof(NNlib.relu))
- β: Array{Float32}((64,)) Float32[-0.84672344, -0.26012757, 1.4990497, -0.011919169, 0.66305625, 1.4369799, -0.8488725, 0.7866893, 1.487084, 1.79422 … -0.86702293, -0.2408931, 1.2804271, 0.87181693, -0.7298483, -1.4155811, -0.7038333, 1.947866, 1.805574, -0.7683194]
- γ: Array{Float32}((64,)) Float32[0.7888931, 2.9004042, 1.9509327, 0.45354733, 1.6807778, 2.2157712, 1.1558096, 1.2502259, 2.8569412, 1.201968 … 0.7793589, 3.0169992, -0.5738261, 0.16780345, 0.1682949, 0.76206064, 0.43538103, 2.4002943, 3.1641538, 0.32216883]
- μ: Array{Float32}((64,)) Float32[8.089195, -142.4189, 53.106514, -31.026762, 7.551188, 10.948121, 10.342741, -25.76755, 93.96811, -30.791847 … 21.400917, -92.463326, -13.605045, -30.75712, 11.765198, 30.077248, 9.223973, -1.2655903, 12.230598, 21.161005]
- σ²: Array{Float32}((64,)) Float32[482.45242, 7340.0083, 1150.8917, 174.60309, 722.9034, 1994.129, 586.7296, 1016.9614, 3141.6738, 338.37222 … 76.739944, 5119.3364, 3211.9583, 1279.623, 181.13458, 85.42914, 161.14252, 2940.1226, 2038.8799, 235.38092]
- ϵ: Float32 1.0f-5
- momentum: Float32 0.1f0
- affine: Bool true
- track_stats: Bool true
- active: Nothing nothing
- chs: Int64 64
- 4: Flux.MaxPool{2, 4}
- k: Tuple{Int64, Int64}
- 1: Int64 3
- 2: Int64 3
- pad: NTuple{4, Int64}
- 1: Int64 1
- 2: Int64 1
- 3: Int64 1
- 4: Int64 1
- stride: Tuple{Int64, Int64}
- 1: Int64 2
- 2: Int64 2
- 5: Flux.Chain{Tuple{FastVision.Models.ResBlock, FastVision.Models.ResBlock}}
- layers: Tuple{FastVision.Models.ResBlock, FastVision.Models.ResBlock}
- 1: FastVision.Models.ResBlock
- convs: Flux.Chain{Tuple{Flux.Chain{Tuple{Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, Flux.BatchNorm{typeof(NNlib.relu), Vector{Float32}, Float32, Vector{Float32}}}}, Flux.Chain{Tuple{Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, Flux.BatchNorm{typeof(identity), Vector{Float32}, Float32, Vector{Float32}}}}}}
- idconv: identity (function of type typeof(identity))
- pool: identity (function of type typeof(identity))
- 2: FastVision.Models.ResBlock
- convs: Flux.Chain{Tuple{Flux.Chain{Tuple{Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, Flux.BatchNorm{typeof(NNlib.relu), Vector{Float32}, Float32, Vector{Float32}}}}, Flux.Chain{Tuple{Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, Flux.BatchNorm{typeof(identity), Vector{Float32}, Float32, Vector{Float32}}}}}}
- idconv: identity (function of type typeof(identity))
- pool: identity (function of type typeof(identity))
- 6: Flux.Chain{Tuple{FastVision.Models.ResBlock, FastVision.Models.ResBlock}}
- layers: Tuple{FastVision.Models.ResBlock, FastVision.Models.ResBlock}
- 1: FastVision.Models.ResBlock
- convs: Flux.Chain{Tuple{Flux.Chain{Tuple{Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, Flux.BatchNorm{typeof(NNlib.relu), Vector{Float32}, Float32, Vector{Float32}}}}, Flux.Chain{Tuple{Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, Flux.BatchNorm{typeof(identity), Vector{Float32}, Float32, Vector{Float32}}}}}}
- idconv: Flux.Chain{Tuple{Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, Flux.BatchNorm{typeof(NNlib.relu), Vector{Float32}, Float32, Vector{Float32}}}}
- pool: Flux.MeanPool{2, 4}
- 2: FastVision.Models.ResBlock
- convs: Flux.Chain{Tuple{Flux.Chain{Tuple{Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, Flux.BatchNorm{typeof(NNlib.relu), Vector{Float32}, Float32, Vector{Float32}}}}, Flux.Chain{Tuple{Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, Flux.BatchNorm{typeof(identity), Vector{Float32}, Float32, Vector{Float32}}}}}}
- idconv: identity (function of type typeof(identity))
- pool: identity (function of type typeof(identity))
- 7: Flux.Chain{Tuple{FastVision.Models.ResBlock, FastVision.Models.ResBlock}}
- layers: Tuple{FastVision.Models.ResBlock, FastVision.Models.ResBlock}
- 1: FastVision.Models.ResBlock
- convs: Flux.Chain{Tuple{Flux.Chain{Tuple{Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, Flux.BatchNorm{typeof(NNlib.relu), Vector{Float32}, Float32, Vector{Float32}}}}, Flux.Chain{Tuple{Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, Flux.BatchNorm{typeof(identity), Vector{Float32}, Float32, Vector{Float32}}}}}}
- idconv: Flux.Chain{Tuple{Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, Flux.BatchNorm{typeof(NNlib.relu), Vector{Float32}, Float32, Vector{Float32}}}}
- pool: Flux.MeanPool{2, 4}
- 2: FastVision.Models.ResBlock
- convs: Flux.Chain{Tuple{Flux.Chain{Tuple{Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, Flux.BatchNorm{typeof(NNlib.relu), Vector{Float32}, Float32, Vector{Float32}}}}, Flux.Chain{Tuple{Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, Flux.BatchNorm{typeof(identity), Vector{Float32}, Float32, Vector{Float32}}}}}}
- idconv: identity (function of type typeof(identity))
- pool: identity (function of type typeof(identity))
- 8: Flux.Chain{Tuple{FastVision.Models.ResBlock, FastVision.Models.ResBlock}}
- layers: Tuple{FastVision.Models.ResBlock, FastVision.Models.ResBlock}
- 1: FastVision.Models.ResBlock
- convs: Flux.Chain{Tuple{Flux.Chain{Tuple{Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, Flux.BatchNorm{typeof(NNlib.relu), Vector{Float32}, Float32, Vector{Float32}}}}, Flux.Chain{Tuple{Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, Flux.BatchNorm{typeof(identity), Vector{Float32}, Float32, Vector{Float32}}}}}}
- idconv: Flux.Chain{Tuple{Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, Flux.BatchNorm{typeof(NNlib.relu), Vector{Float32}, Float32, Vector{Float32}}}}
- pool: Flux.MeanPool{2, 4}
- 2: FastVision.Models.ResBlock
- convs: Flux.Chain{Tuple{Flux.Chain{Tuple{Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, Flux.BatchNorm{typeof(NNlib.relu), Vector{Float32}, Float32, Vector{Float32}}}}, Flux.Chain{Tuple{Flux.Conv{2, 4, typeof(identity), Array{Float32, 4}, Vector{Float32}}, Flux.BatchNorm{typeof(identity), Vector{Float32}, Float32, Vector{Float32}}}}}}
- idconv: identity (function of type typeof(identity))
- pool: identity (function of type typeof(identity))
- 2: Flux.Chain{Tuple{Flux.Parallel{typeof(vcat), Tuple{Flux.AdaptiveMeanPool{4, 2}, Flux.AdaptiveMaxPool{4, 2}}}, typeof(MLUtils.flatten), Flux.Chain{Tuple{Flux.BatchNorm{typeof(identity), Vector{Float32}, Float32, Vector{Float32}}, typeof(identity), Flux.Dense{typeof(NNlib.relu), Matrix{Float32}, Bool}}}, Flux.Chain{Tuple{Flux.BatchNorm{typeof(identity), Vector{Float32}, Float32, Vector{Float32}}, typeof(identity), Flux.Dense{typeof(identity), Matrix{Float32}, Bool}}}}}
- layers: Tuple{Flux.Parallel{typeof(vcat), Tuple{Flux.AdaptiveMeanPool{4, 2}, Flux.AdaptiveMaxPool{4, 2}}}, typeof(MLUtils.flatten), Flux.Chain{Tuple{Flux.BatchNorm{typeof(identity), Vector{Float32}, Float32, Vector{Float32}}, typeof(identity), Flux.Dense{typeof(NNlib.relu), Matrix{Float32}, Bool}}}, Flux.Chain{Tuple{Flux.BatchNorm{typeof(identity), Vector{Float32}, Float32, Vector{Float32}}, typeof(identity), Flux.Dense{typeof(identity), Matrix{Float32}, Bool}}}}
- 1: Flux.Parallel{typeof(vcat), Tuple{Flux.AdaptiveMeanPool{4, 2}, Flux.AdaptiveMaxPool{4, 2}}}
- connection: vcat (function of type typeof(vcat))
- layers: Tuple{Flux.AdaptiveMeanPool{4, 2}, Flux.AdaptiveMaxPool{4, 2}}
- 1: Flux.AdaptiveMeanPool{4, 2}
- out: Tuple{Int64, Int64}
- 2: Flux.AdaptiveMaxPool{4, 2}
- out: Tuple{Int64, Int64}
- 2: flatten (function of type typeof(MLUtils.flatten))
- 3: Flux.Chain{Tuple{Flux.BatchNorm{typeof(identity), Vector{Float32}, Float32, Vector{Float32}}, typeof(identity), Flux.Dense{typeof(NNlib.relu), Matrix{Float32}, Bool}}}
- layers: Tuple{Flux.BatchNorm{typeof(identity), Vector{Float32}, Float32, Vector{Float32}}, typeof(identity), Flux.Dense{typeof(NNlib.relu), Matrix{Float32}, Bool}}
- 1: Flux.BatchNorm{typeof(identity), Vector{Float32}, Float32, Vector{Float32}}
- λ: identity (function of type typeof(identity))
- β: Array{Float32}((1024,)) Float32[2.9535346, 2.9695199, 0.7636352, 0.108133726, -2.5341177, -2.564012, -2.7037675, -3.356126, 3.2200859, 2.681916 … 1.8254498, 1.7388748, -1.4431437, -2.845715, 2.252644, 0.44542652, 1.082831, 1.6310502, 1.4004306, 0.790929]
- γ: Array{Float32}((1024,)) Float32[-0.29546136, 0.12316175, -0.4333991, 0.04932005, -1.2182881, -0.88146466, -0.84309345, -1.6838812, 2.5601938, 1.9808251 … 0.19737825, 1.0077192, -0.86438835, -0.067379855, 0.015523051, 0.40822363, -0.06621101, 0.19198619, -0.027223261, 0.35390672]
- μ: Array{Float32}((1024,)) Float32[1.5728868, 4.2629867, 0.3171236, 1.3264793, 4.576622, 9.759755, 4.7374907, 9.741766, 2.984047f-38, 1.1904287f-37 … 1.453111, 3.6328132, 1.9341743, 4.2379427, 1.5239809, 3.9791052, 2.3251176, 5.7427497, 2.0479565, 6.710321]
- σ²: Array{Float32}((1024,)) Float32[0.9858076, 5.485429, 0.34528804, 2.7161274, 5.3330164, 19.455864, 3.9824073, 6.3979354, 2.069356f-38, 2.271622f-37 … 1.0526313, 3.0245724, 0.8016646, 2.324081, 1.4134144, 4.79146, 3.5393748, 8.056388, 2.2770483, 13.281125]
- ϵ: Float32 1.0f-5
- momentum: Float32 0.1f0
- affine: Bool true
- track_stats: Bool true
- active: Nothing nothing
- chs: Int64 1024
- 2: identity (function of type typeof(identity))
- 3: Flux.Dense{typeof(NNlib.relu), Matrix{Float32}, Bool}
- weight: Array{Float32}((512, 1024)) Float32[0.35254595 0.37300357 … -0.16560957 -0.12532672; -0.3787931 -0.42160413 … 0.1686524 0.20694582; … ; -0.1882783 -0.16154407 … -0.06835318 0.2215285; -0.0010631765 0.07925456 … -0.04693754 -0.049447425]
- bias: Bool false
- σ: relu (function of type typeof(NNlib.relu))
- 4: Flux.Chain{Tuple{Flux.BatchNorm{typeof(identity), Vector{Float32}, Float32, Vector{Float32}}, typeof(identity), Flux.Dense{typeof(identity), Matrix{Float32}, Bool}}}
- layers: Tuple{Flux.BatchNorm{typeof(identity), Vector{Float32}, Float32, Vector{Float32}}, typeof(identity), Flux.Dense{typeof(identity), Matrix{Float32}, Bool}}
- 1: Flux.BatchNorm{typeof(identity), Vector{Float32}, Float32, Vector{Float32}}
- λ: identity (function of type typeof(identity))
- β: Array{Float32}((512,)) Float32[-0.00042381042, 0.029745022, -0.00067011104, -0.00583051, 0.002128529, 0.012743832, 0.0022065798, -0.0006338556, -0.0025254688, -0.001961066 … -0.0021215933, -0.0016222714, 0.0139183765, 0.0004390899, -0.022228813, 0.040085506, 0.007011194, 0.027901476, -0.00077107997, 0.00029254716]
- γ: Array{Float32}((512,)) Float32[-0.00058319455, 0.13426286, -0.5411829, -0.26358172, -0.25671864, -0.39294815, 0.40810674, -0.0010609375, 0.02162393, -0.36817032 … 0.00089147367, 0.20706993, -0.26106647, -0.19039036, -0.25707412, -0.025029691, 0.00011088717, -0.0027701575, -0.011379168, 0.16919172]
- μ: Array{Float32}((512,)) Float32[212.21371, 1.5665601f-9, 6.4283237f-28, 309.00668, 4.7268787f-30, 2.2695532, 2.2247621f-38, 43.65119, 45.251583, 6.0f-45 … 223.92763, 2.7313798f-34, 3.9f-44, 7.129539f-30, 268.02872, 91.22722, 34.5089, 426.1488, 1.47f-43, 6.0f-45]
- σ²: Array{Float32}((512,)) Float32[9667.842, 4.8745587f-8, 5.509345f-26, 3399.5254, 1.3901229f-28, 298.86728, 5.4049f-37, 3208.8457, 8884.857, 3.0f-44 … 16198.979, 5.5080892f-33, 8.73f-43, 1.2498662f-28, 4318.5786, 3412.8137, 741.28326, 26796.139, 1.2211f-41, 8.5f-44]
- ϵ: Float32 1.0f-5
- momentum: Float32 0.1f0
- affine: Bool true
- track_stats: Bool true
- active: Nothing nothing
- chs: Int64 512
- 2: identity (function of type typeof(identity))
- 3: Flux.Dense{typeof(identity), Matrix{Float32}, Bool}
- weight: Array{Float32}((10, 512)) Float32[0.033576462 -0.049214333 … -0.020066774 0.015939686; 0.03776026 0.038943905 … -0.018442996 0.013342874; … ; 0.035780597 -0.027480626 … -0.021669282 0.020547876; 0.039229594 0.037363328 … -0.019827502 0.018211719]
- bias: Bool false
- σ: identity (function of type typeof(identity))
- data: FluxTraining.PropDict{Any}
- d: Dict{Symbol, Any}
- slots: Array{UInt8}((16,)) UInt8[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00]
- keys: Array{Symbol}((16,))
- 1: #undef
- 2: #undef
- 3: #undef
- 4: #undef
- 5: #undef
- ...
- 12: #undef
- 13: #undef
- 14: Symbol validation
- 15: #undef
- 16: #undef
- vals: Array{Any}((16,))
- 1: #undef
- 2: #undef
- 3: #undef
- 4: #undef
- 5: #undef
- ...
- 12: #undef
- 13: #undef
- 14: MLUtils.DataLoader{FastAI.TaskDataset{Tuple{ObsView{MLUtils.MappedData{:auto, typeof(loadfile), ObsView{MLDatasets.FileDataset{typeof(identity), String}, Vector{Int64}}}, UnitRange{Int64}}, ObsView{MLUtils.MappedData{:auto, typeof(parentname), ObsView{MLDatasets.FileDataset{typeof(identity), String}, Vector{Int64}}}, UnitRange{Int64}}}, SupervisedTask{NamedTuple{(:input, :target, :sample, :encodedsample, :x, :y, :ŷ, :pred), Tuple{Image{2}, Label{String}, Tuple{Image{2}, Label{String}}, Tuple{Bounded{2, FastVision.ImageTensor{2}}, FastAI.OneHotTensor{0, String}}, Bounded{2, FastVision.ImageTensor{2}}, FastAI.OneHotTensor{0, String}, FastAI.OneHotTensor{0, String}, Label{String}}}, Tuple{ProjectiveTransforms{2, NamedTuple{(:training, :validation, :inference), Tuple{DataAugmentation.BufferedThreadsafe, DataAugmentation.BufferedThreadsafe, DataAugmentation.Sequence{Tuple{DataAugmentation.CroppedProjectiveTransform{DataAugmentation.ScaleKeepAspect{2}, DataAugmentation.PadDivisible}, DataAugmentation.PinOrigin}}}}}, ImagePreprocessing{FixedPointNumbers.N0f8, 3, ColorTypes.RGB{FixedPointNumbers.N0f8}, Float32}, OneHot{DataType}}}, Validation}, Random._GLOBAL_RNG, Val{true}}
- data: FastAI.TaskDataset{Tuple{ObsView{MLUtils.MappedData{:auto, typeof(loadfile), ObsView{MLDatasets.FileDataset{typeof(identity), String}, Vector{Int64}}}, UnitRange{Int64}}, ObsView{MLUtils.MappedData{:auto, typeof(parentname), ObsView{MLDatasets.FileDataset{typeof(identity), String}, Vector{Int64}}}, UnitRange{Int64}}}, SupervisedTask{NamedTuple{(:input, :target, :sample, :encodedsample, :x, :y, :ŷ, :pred), Tuple{Image{2}, Label{String}, Tuple{Image{2}, Label{String}}, Tuple{Bounded{2, FastVision.ImageTensor{2}}, FastAI.OneHotTensor{0, String}}, Bounded{2, FastVision.ImageTensor{2}}, FastAI.OneHotTensor{0, String}, FastAI.OneHotTensor{0, String}, Label{String}}}, Tuple{ProjectiveTransforms{2, NamedTuple{(:training, :validation, :inference), Tuple{DataAugmentation.BufferedThreadsafe, DataAugmentation.BufferedThreadsafe, DataAugmentation.Sequence{Tuple{DataAugmentation.CroppedProjectiveTransform{DataAugmentation.ScaleKeepAspect{2}, DataAugmentation.PadDivisible}, DataAugmentation.PinOrigin}}}}}, ImagePreprocessing{FixedPointNumbers.N0f8, 3, ColorTypes.RGB{FixedPointNumbers.N0f8}, Float32}, OneHot{DataType}}}, Validation}
- data: Tuple{ObsView{MLUtils.MappedData{:auto, typeof(loadfile), ObsView{MLDatasets.FileDataset{typeof(identity), String}, Vector{Int64}}}, UnitRange{Int64}}, ObsView{MLUtils.MappedData{:auto, typeof(parentname), ObsView{MLDatasets.FileDataset{typeof(identity), String}, Vector{Int64}}}, UnitRange{Int64}}}
- 1: ObsView{MLUtils.MappedData{:auto, typeof(loadfile), ObsView{MLDatasets.FileDataset{typeof(identity), String}, Vector{Int64}}}, UnitRange{Int64}}
- data: MLUtils.MappedData{:auto, typeof(loadfile), ObsView{MLDatasets.FileDataset{typeof(identity), String}, Vector{Int64}}}
- indices: UnitRange{Int64}
- 2: ObsView{MLUtils.MappedData{:auto, typeof(parentname), ObsView{MLDatasets.FileDataset{typeof(identity), String}, Vector{Int64}}}, UnitRange{Int64}}
- data: MLUtils.MappedData{:auto, typeof(parentname), ObsView{MLDatasets.FileDataset{typeof(identity), String}, Vector{Int64}}}
- indices: UnitRange{Int64}
- task: SupervisedTask{NamedTuple{(:input, :target, :sample, :encodedsample, :x, :y, :ŷ, :pred), Tuple{Image{2}, Label{String}, Tuple{Image{2}, Label{String}}, Tuple{Bounded{2, FastVision.ImageTensor{2}}, FastAI.OneHotTensor{0, String}}, Bounded{2, FastVision.ImageTensor{2}}, FastAI.OneHotTensor{0, String}, FastAI.OneHotTensor{0, String}, Label{String}}}, Tuple{ProjectiveTransforms{2, NamedTuple{(:training, :validation, :inference), Tuple{DataAugmentation.BufferedThreadsafe, DataAugmentation.BufferedThreadsafe, DataAugmentation.Sequence{Tuple{DataAugmentation.CroppedProjectiveTransform{DataAugmentation.ScaleKeepAspect{2}, DataAugmentation.PadDivisible}, DataAugmentation.PinOrigin}}}}}, ImagePreprocessing{FixedPointNumbers.N0f8, 3, ColorTypes.RGB{FixedPointNumbers.N0f8}, Float32}, OneHot{DataType}}}
- blocks: NamedTuple{(:input, :target, :sample, :encodedsample, :x, :y, :ŷ, :pred), Tuple{Image{2}, Label{String}, Tuple{Image{2}, Label{String}}, Tuple{Bounded{2, FastVision.ImageTensor{2}}, FastAI.OneHotTensor{0, String}}, Bounded{2, FastVision.ImageTensor{2}}, FastAI.OneHotTensor{0, String}, FastAI.OneHotTensor{0, String}, Label{String}}}
- input: Image{2} Image{2}()
- target: Label{String}
- sample: Tuple{Image{2}, Label{String}}
- encodedsample: Tuple{Bounded{2, FastVision.ImageTensor{2}}, FastAI.OneHotTensor{0, String}}
- x: Bounded{2, FastVision.ImageTensor{2}}
- y: FastAI.OneHotTensor{0, String}
- ŷ: FastAI.OneHotTensor{0, String}
- pred: Label{String}
- encodings: Tuple{ProjectiveTransforms{2, NamedTuple{(:training, :validation, :inference), Tuple{DataAugmentation.BufferedThreadsafe, DataAugmentation.BufferedThreadsafe, DataAugmentation.Sequence{Tuple{DataAugmentation.CroppedProjectiveTransform{DataAugmentation.ScaleKeepAspect{2}, DataAugmentation.PadDivisible}, DataAugmentation.PinOrigin}}}}}, ImagePreprocessing{FixedPointNumbers.N0f8, 3, ColorTypes.RGB{FixedPointNumbers.N0f8}, Float32}, OneHot{DataType}}
- 1: ProjectiveTransforms{2, NamedTuple{(:training, :validation, :inference), Tuple{DataAugmentation.BufferedThreadsafe, DataAugmentation.BufferedThreadsafe, DataAugmentation.Sequence{Tuple{DataAugmentation.CroppedProjectiveTransform{DataAugmentation.ScaleKeepAspect{2}, DataAugmentation.PadDivisible}, DataAugmentation.PinOrigin}}}}}
- 2: ImagePreprocessing{FixedPointNumbers.N0f8, 3, ColorTypes.RGB{FixedPointNumbers.N0f8}, Float32}
- 3: OneHot{DataType}
- context: Validation Validation()
- batchsize: Int64 32
- buffer: Bool false
- partial: Bool true
- shuffle: Bool false
- parallel: Bool true
- collate: Val{true} Val{true}()
- rng: Random._GLOBAL_RNG Random._GLOBAL_RNG()
- 15: #undef
- 16: #undef
- ndel: Int64 0
- count: Int64 2
- age: UInt64 0x0000000000000002
- idxfloor: Int64 1
- maxprobe: Int64 0
- optimizer: Flux.Optimise.Adam
- eta: Float64 0.0001572965153518101
- beta: Tuple{Float64, Float64}
- 1: Float64 0.9
- 2: Float64 0.999
- epsilon: Float64 1.0e-8
- state: IdDict{Any, Any}
- ht: Array{Any}((256,))
- 1: Array{Float32}((256,)) Float32[0.6200117, -0.72974277, 5.113774, -2.148265, -1.577467, -1.5961236, -1.9828687, -2.3928611, -1.0618314, -2.5876248 … -2.3245878, -1.5333382, 5.439896, 0.62296313, -2.683208, -3.1357234, -1.4270524, -2.2430596, 0.06796829, -0.90764433]
- 2: Tuple{Vector{Float32}, Vector{Float32}, Vector{Float64}}
- 1: Array{Float32}((256,)) Float32[-0.00010033332, 6.0f-45, -1.2719469f-6, 6.0f-45, 1.5468966f-5, 6.0f-45, 6.0f-45, 6.0f-45, -7.68562f-40, 3.6114855f-8 … 6.7144956f-36, -2.2741284f-5, -6.3754283f-6, -2.4410043f-5, 1.0995537f-7, 6.0f-45, 6.0f-45, 2.3772896f-5, -3.202111f-5, -2.5265765f-8]
- 2: Array{Float32}((256,)) Float32[5.6967345f-8, 3.7049822f-10, 6.0200634f-8, 3.8210232f-10, 4.5520455f-8, 1.8074777f-9, 6.349883f-10, 2.930345f-9, 2.4571664f-10, 4.2823636f-10 … 8.5899715f-10, 5.8469762f-9, 3.9202728f-8, 3.6768267f-8, 9.399019f-10, 7.783588f-9, 2.769867f-10, 1.873037f-8, 1.6094528f-8, 1.0965677f-9]
- 3: Array{Float64}((2,)) [4.357176555101193e-62, 0.2614085035996406]
- 3: #undef
- 4: #undef
- 5: #undef
- ...
- 252: Tuple{Array{Float32, 4}, Array{Float32, 4}, Vector{Float64}}
- 1: Array{Float32}((3, 3, 128, 256)) [5.3491766f-7 5.2956483f-7 6.530874f-7; 4.4767572f-7 3.8958228f-7 4.151065f-7; 5.866547f-7 4.203742f-7 5.8921125f-7;;; 1.654692f-8 3.265833f-8 -7.991905f-9; -4.0465125f-7 -3.029541f-7 -2.8087058f-7; -4.1008406f-7 -5.546364f-7 -4.6810396f-7;;; -5.7051346f-7 -5.264178f-7 -6.145042f-7; -9.21001f-7 -8.379299f-7 -8.564298f-7; -8.6354623f-7 -1.0239155f-6 -9.714939f-7;;; … ;;; -6.0f-45 -6.0f-45 6.0f-45; -1.823f-42 -1.886f-42 -1.391f-42; 6.0f-45 -6.0f-45 6.0f-45;;; -5.779818f-11 -7.072923f-11 2.8241335f-11; -7.6939254f-11 1.8118855f-11 1.9459191f-11; -3.3401118f-12 2.8177082f-11 1.7779811f-10;;; 5.273619f-7 3.295204f-7 1.9680238f-7; 6.816469f-7 5.366817f-7 4.8819504f-7; 9.2146365f-7 7.275625f-7 7.064221f-7;;;; 6.0f-45 -6.0f-45 6.0f-45; 6.0f-45 6.0f-45 6.0f-45; 6.0f-45 6.0f-45 6.0f-45;;; -6.0f-45 -6.0f-45 6.0f-45; -6.0f-45 6.0f-45 6.0f-45; 6.0f-45 6.0f-45 6.0f-45;;; -6.0f-45 -6.0f-45 6.0f-45; -6.0f-45 6.0f-45 6.0f-45; -6.0f-45 -6.0f-45 6.0f-45;;; … ;;; -6.0f-45 -6.0f-45 -6.0f-45; -6.0f-45 -6.0f-45 -6.0f-45; -6.0f-45 -6.0f-45 -6.0f-45;;; 6.0f-45 6.0f-45 6.0f-45; 6.0f-45 6.0f-45 6.0f-45; -6.0f-45 6.0f-45 -6.0f-45;;; -6.0f-45 -6.0f-45 -6.0f-45; -6.0f-45 -6.0f-45 -6.0f-45; -6.0f-45 -6.0f-45 -6.0f-45;;;; -6.782f-8 -1.6056175f-7 -6.3698076f-7; 4.0166896f-8 -8.457641f-8 -5.182219f-7; 2.3643915f-10 -3.8495788f-8 -4.584222f-7;;; -6.667246f-7 -6.5973893f-7 -7.42495f-7; -3.379108f-7 -3.1147397f-7 -4.1230277f-7; -3.6075726f-8 1.03692216f-7 -1.8573743f-8;;; -9.0062935f-7 -8.501373f-7 -9.801602f-7; -5.218214f-7 -4.2153573f-7 -4.9490643f-7; -1.6831507f-7 7.148986f-8 -1.2196591f-8;;; … ;;; -6.0f-45 -6.0f-45 6.0f-45; -4.0f-43 -2.52f-43 -7.7f-44; -6.0f-45 -6.0f-45 6.0f-45;;; 8.704696f-10 3.8454812f-10 6.262999f-10; -3.5695394f-10 -1.00188226f-10 -4.3812617f-10; 6.610906f-10 3.0011815f-10 8.8537105f-10;;; 5.639524f-7 5.3693464f-7 1.7611875f-7; 4.89316f-7 4.2974094f-7 9.337145f-8; 1.00201305f-7 7.6712325f-8 -1.36696f-7;;;; … ;;;; 5.3385143f-8 3.069111f-8 6.730954f-8; 5.0752885f-8 5.7430658f-8 1.0993884f-7; -1.5196289f-8 -1.404835f-8 3.8355996f-8;;; -7.295166f-8 -1.2115318f-7 -1.6373238f-7; -9.852056f-8 -1.3846741f-7 -1.7666632f-7; -1.3004968f-7 -1.5733329f-7 -2.0247685f-7;;; -7.7361435f-8 -1.4775988f-7 -1.9044647f-7; -1.3383891f-7 -1.9217579f-7 -2.3502741f-7; -2.2050759f-7 -2.6298488f-7 -3.0991546f-7;;; … ;;; -6.0f-45 -6.0f-45 -6.0f-45; -2.52f-43 -2.98f-43 -2.27f-43; -6.0f-45 -6.0f-45 -6.0f-45;;; 9.957772f-12 9.218517f-12 3.988612f-12; -3.9285038f-13 1.3259229f-12 7.268436f-14; 3.5451306f-12 -2.1679085f-13 -2.6996515f-13;;; 3.110867f-8 8.188571f-8 6.926954f-8; 3.3040557f-8 7.0044315f-8 5.199965f-8; -3.864242f-8 -7.265657f-9 -1.30843185f-8;;;; -5.243161f-7 -5.0170706f-7 -4.880694f-7; -4.4978896f-7 -4.5823788f-7 -4.6482648f-7; -4.6187026f-7 -4.6984027f-7 -4.96046f-7;;; -3.469216f-7 -2.7425858f-7 -2.7340093f-7; -1.9244176f-7 -1.3609161f-7 -1.7654146f-7; -1.8972129f-7 -8.319453f-8 -1.1386368f-7;;; -3.3198793f-7 -2.4869206f-7 -2.4732194f-7; -2.0464779f-7 -1.4187195f-7 -1.6714642f-7; -1.977924f-7 -9.340086f-8 -1.1242093f-7;;; … ;;; -6.0f-45 -6.0f-45 -6.0f-45; 1.0f-44 1.1f-44 1.0f-44; -6.0f-45 -6.0f-45 -6.0f-45;;; 1.1922419f-10 3.4643722f-11 1.11700496f-10; -3.3807138f-11 -1.949478f-11 -8.0378405f-11; 3.4878683f-10 4.7693342f-11 3.8214718f-10;;; 3.0106912f-7 2.865125f-7 2.5332162f-7; 3.3530912f-7 2.9782734f-7 3.0194352f-7; 2.4155406f-7 2.1317221f-7 2.0339787f-7;;;; -4.3698115f-10 -3.589038f-10 -4.4318613f-10; 4.6657095f-10 3.4230258f-10 3.5075637f-10; 1.0202943f-9 9.444542f-10 
8.8351926f-10;;; -2.0843047f-10 -1.9328464f-10 -4.3688572f-10; -1.2015666f-10 -1.4340851f-10 -2.3311492f-10; 1.6405893f-10 4.2109784f-11 -1.10427445f-10;;; -1.7310621f-10 -2.4749292f-10 -5.2297555f-10; -2.4845198f-10 -3.6904726f-10 -4.8208804f-10; 1.381504f-10 -2.122887f-11 -1.9209408f-10;;; … ;;; -6.0f-45 -6.0f-45 -6.0f-45; 6.3f-44 6.6f-44 5.0f-44; -6.0f-45 -6.0f-45 6.0f-45;;; 3.444752f-15 -3.7225307f-15 -1.2179592f-14; -2.680513f-15 -1.1066718f-15 1.26104f-15; -9.3627185f-15 -1.4325211f-14 -1.9864573f-14;;; 2.9800165f-10 1.9799273f-10 2.5345864f-10; -2.203588f-10 -2.546094f-10 -1.0444019f-10; 2.6499858f-10 1.8158325f-10 3.828833f-10]
- 2: Array{Float32}((3, 3, 128, 256)) [4.2930416f-11 4.1666014f-11 3.2864746f-11; 4.3537056f-11 5.2110632f-11 5.079044f-11; 3.950816f-11 4.395711f-11 4.16461f-11;;; 3.5669457f-11 3.359347f-11 3.7209666f-11; 3.309042f-11 3.4160477f-11 4.11629f-11; 3.9031108f-11 3.6863717f-11 4.0182503f-11;;; 6.9013656f-11 6.272415f-11 5.7038475f-11; 6.1825545f-11 6.044883f-11 6.083202f-11; 6.324721f-11 5.829085f-11 6.305426f-11;;; … ;;; 1.7501209f-11 1.9643358f-11 1.6233691f-11; 1.7908201f-11 1.9781f-11 1.6912098f-11; 1.7250913f-11 2.0274296f-11 1.2851472f-11;;; 6.095539f-11 6.8701385f-11 4.8174756f-11; 4.5701012f-11 4.4095137f-11 3.462645f-11; 4.8445425f-11 5.651692f-11 3.5259795f-11;;; 1.0023479f-10 1.08070886f-10 9.2585f-11; 8.63253f-11 8.623956f-11 7.43563f-11; 7.884395f-11 8.949684f-11 7.2475484f-11;;;; 5.2906634f-11 5.8083722f-11 5.3697147f-11; 4.5304385f-11 3.7244704f-11 5.5661756f-11; 2.1036829f-11 2.9059482f-11 3.0633093f-11;;; 3.611843f-11 4.3307444f-11 2.3761694f-11; 2.3659732f-11 4.0493338f-11 2.619681f-11; 3.919175f-11 3.51325f-11 2.417956f-11;;; 1.4513581f-11 2.2565332f-11 2.3138883f-11; 1.9529345f-11 2.41387f-11 1.9783255f-11; 1.934587f-11 3.712542f-11 2.965738f-11;;; … ;;; 4.47206f-11 4.704828f-11 5.5393617f-11; 1.2103846f-10 7.143209f-11 6.187655f-11; 7.707984f-11 5.4059663f-11 4.7205195f-11;;; 4.764931f-11 4.99266f-11 2.5800041f-11; 3.334571f-11 3.215072f-11 2.2341006f-11; 4.9825803f-11 4.1055086f-11 1.2021344f-11;;; 2.703441f-11 1.9230673f-11 1.3775334f-11; 2.5786508f-11 2.5897288f-11 2.0561077f-11; 2.3950468f-11 1.986199f-11 1.3830769f-11;;;; 1.41918265f-11 1.3810589f-11 3.8577513f-11; 1.293495f-11 2.03882f-11 3.3431837f-11; 1.4089607f-11 2.2154494f-11 4.2062333f-11;;; 3.28359f-11 1.6729985f-11 4.021012f-11; 1.5207016f-11 1.3272613f-11 1.6627068f-11; 5.4141518f-11 3.7761613f-11 5.992498f-11;;; 2.77131f-11 6.9221025f-11 5.181042f-11; 2.5767272f-11 3.272665f-11 3.5653425f-11; 4.270755f-11 4.512376f-11 3.7210537f-11;;; … ;;; 6.114619f-11 6.112398f-11 6.041425f-11; 5.7643786f-11 5.5282962f-11 5.3545206f-11; 7.0846905f-11 4.6969824f-11 6.3107325f-11;;; 1.0601617f-11 1.1980331f-11 8.3235875f-12; 1.109045f-11 1.1431926f-11 5.2317096f-12; 8.682034f-12 7.944282f-12 7.807156f-12;;; 6.390059f-11 6.721497f-11 6.502095f-11; 5.4073333f-11 6.087088f-11 5.581854f-11; 4.293102f-11 4.5193103f-11 4.8942357f-11;;;; … ;;;; 2.7866551f-11 2.740936f-11 2.2322422f-11; 2.9062535f-11 2.972389f-11 2.1799264f-11; 2.8150112f-11 3.2549997f-11 2.055561f-11;;; 7.405127f-11 6.744314f-11 6.989203f-11; 9.769555f-11 7.85644f-11 6.1749245f-11; 4.408085f-11 2.7343742f-11 2.8835396f-11;;; 9.347594f-11 7.93543f-11 6.635453f-11; 8.273369f-11 7.2172025f-11 5.102951f-11; 7.315568f-11 5.884979f-11 2.9728074f-11;;; … ;;; 4.2960187f-11 4.415649f-11 4.3767375f-11; 5.9226006f-11 7.632116f-11 6.645016f-11; 3.10128f-11 4.3680434f-11 4.7933577f-11;;; 4.5483565f-11 6.6025324f-11 4.6686512f-11; 4.3143024f-11 6.323066f-11 4.8727477f-11; 3.7659983f-11 6.246608f-11 5.726989f-11;;; 1.9589611f-11 2.1671071f-11 2.3957138f-11; 2.9161184f-11 3.5535064f-11 3.5591537f-11; 2.8787098f-11 2.9600114f-11 3.188972f-11;;;; 1.11033864f-11 9.520288f-12 9.843872f-12; 1.30889015f-11 1.5657815f-11 1.14366615f-11; 1.014514f-11 1.12568375f-11 7.728902f-12;;; 8.396381f-12 7.849073f-12 6.3088866f-12; 1.03124185f-11 8.734096f-12 8.417657f-12; 1.3116469f-11 9.768042f-12 1.10876195f-11;;; 6.3640052f-12 4.800402f-12 4.634995f-12; 5.091943f-12 4.0181127f-12 4.799376f-12; 5.2336794f-12 4.7633854f-12 6.5084977f-12;;; … ;;; 5.860294f-12 4.1402177f-12 5.230657f-12; 
6.4243363f-12 6.7689153f-12 8.492828f-12; 7.68047f-12 7.855847f-12 8.565225f-12;;; 2.1944307f-11 2.962619f-11 1.7938628f-11; 2.0968354f-11 3.034912f-11 1.4133649f-11; 1.2472251f-11 2.3983572f-11 1.24011105f-11;;; 2.8257336f-11 2.3746777f-11 2.0692056f-11; 2.0107221f-11 1.8413512f-11 1.3561152f-11; 1.474543f-11 1.2927899f-11 9.9089365f-12;;;; 1.384362f-11 2.1574824f-11 1.6361523f-11; 1.6671626f-11 2.3293643f-11 2.4164998f-11; 1.435747f-11 1.3373895f-11 1.3863871f-11;;; 1.08382955f-11 1.1229592f-11 1.4884623f-11; 2.1466622f-11 1.754284f-11 1.5882216f-11; 1.4177706f-11 1.1621503f-11 1.2640329f-11;;; 8.614912f-12 1.061684f-11 1.140015f-11; 1.6172676f-11 1.3897522f-11 1.2361682f-11; 1.8409486f-11 1.4439278f-11 1.25837864f-11;;; … ;;; 1.2598293f-11 4.902795f-12 9.684192f-12; 7.607902f-12 5.5925308f-12 4.9466043f-12; 1.1131062f-11 1.0584215f-11 8.169186f-12;;; 1.0650065f-11 1.263893f-11 1.3848391f-11; 1.38897765f-11 1.4798865f-11 1.0496878f-11; 2.0693931f-11 1.7718528f-11 1.2752675f-11;;; 3.0695384f-11 2.994405f-11 1.8937385f-11; 1.855006f-11 1.9687047f-11 1.521557f-11; 2.3741405f-11 2.1360828f-11 1.793172f-11]
- 3: Array{Float64}((2,)) [4.357176555101193e-62, 0.2614085035996406]
- 253: Array{Float32}((128,)) Float32[0.36903876, 0.31740677, 0.93585, 0.1871628, -0.2116408, 0.04046653, 0.2173404, 0.093711, -1.1932013, -0.1260338 … -0.87436074, 0.1562037, 0.6081963, 0.11023683, 0.63319623, -0.032162294, 0.8218119, -0.7798629, 0.09221165, 1.3291281]
- 254: Tuple{Vector{Float32}, Vector{Float32}, Vector{Float64}}
- 1: Array{Float32}((128,)) Float32[0.00035063864, 0.00020014516, -0.00017100076, 6.252455f-5, 4.337115f-5, -2.1966172f-5, -0.00043491757, 0.00025996845, 2.8947086f-5, 0.0003462534 … 8.209359f-6, 5.6840912f-5, -5.7910775f-6, -2.9552148f-5, 0.0005619626, -0.0006304973, 3.3584372f-5, -1.56458f-40, 7.389515f-8, 0.00039968092]
- 2: Array{Float32}((128,)) Float32[1.1818578f-5, 2.172948f-5, 8.507747f-5, 1.4292172f-5, 1.1791617f-5, 2.5920797f-5, 1.8895422f-5, 2.5636076f-5, 9.632937f-5, 8.490027f-5 … 1.7745837f-5, 3.5991343f-5, 3.3829387f-5, 3.101913f-5, 2.2281214f-5, 2.1954802f-5, 6.295659f-5, 3.9299408f-5, 1.098297f-5, 6.0433693f-5]
- 3: Array{Float64}((2,)) [4.357176555101193e-62, 0.2614085035996406]
- 255: Array{Float32}((10, 512)) Float32[0.033576462 -0.049214333 … -0.020066774 0.015939686; 0.03776026 0.038943905 … -0.018442996 0.013342874; … ; 0.035780597 -0.027480626 … -0.021669282 0.020547876; 0.039229594 0.037363328 … -0.019827502 0.018211719]
- 256: Tuple{Matrix{Float32}, Matrix{Float32}, Vector{Float64}}
- 1: Array{Float32}((10, 512)) Float32[-7.294715f-6 0.0003248461 … -1.000982f-5 2.8713587f-6; -3.621038f-7 0.00016387898 … -3.9096108f-6 1.2135952f-6; … ; -2.2342454f-6 7.924215f-5 … -3.0480571f-6 1.2693619f-6; -9.698299f-6 0.0005907443 … -1.4308234f-5 5.9499575f-6]
- 2: Array{Float32}((10, 512)) Float32[0.00019505221 0.0002236644 … 0.00019864619 0.00013934891; 0.00024802377 0.00015932412 … 0.00016954243 0.00023665067; … ; 0.00018386553 0.00013471076 … 9.8051714f-5 0.00014449241; 0.00034935318 0.00036737032 … 0.00014652126 0.00016910801]
- 3: Array{Float64}((2,)) [4.357176555101193e-62, 0.2614085035996406]
- count: Int64 94
- ndel: Int64 0
- lossfn: logitcrossentropy (function of type typeof(Flux.Losses.logitcrossentropy))
- params: Zygote.Params{Zygote.Buffer{Any, Vector{Any}}}
- order: Zygote.Buffer{Any, Vector{Any}}
- data: Array{Any}((94,))
- 1: Array{Float32}((3, 3, 3, 32)) [-0.6228971 -0.45270604 0.18772596; 0.07976723 0.012726379 -0.35602343; -0.5634569 -0.3862746 0.028778944;;; -0.121167526 0.12853527 0.073828205; 0.07397064 0.30849433 -0.36145356; -0.14903721 -0.36155963 -0.1412598;;; 0.56383353 0.90163064 0.5372785; 0.41688278 0.57620484 0.20665929; 0.68597907 0.7797802 0.61220956;;;; 0.27145606 0.47053307 0.6643835; 0.6616727 0.70229185 0.3597029; 0.83581984 -0.07549205 0.07092042;;; -1.3901415 -1.0986211 -1.8818237; -1.0358185 -0.9884816 -1.2193606; -0.97906834 -1.1071833 -1.420982;;; 0.36389422 0.599576 0.2023731; 0.995327 0.15926023 0.35122323; 0.66623634 -0.01476687 -0.35847715;;;; 0.849729 -0.14286755 0.18935755; 0.7032158 0.41288504 0.549591; 0.5856767 0.20481382 0.4430563;;; -0.86140007 -1.637157 -1.3969746; -1.0799056 -1.5378343 -1.843872; -0.7428982 -1.1910206 -1.3792411;;; 0.12516966 -0.07747632 -0.30945426; 0.44876707 -0.67609656 -0.13640974; 0.32248083 -0.25018722 -0.34930056;;;; … ;;;; 1.2356839 0.7943954 0.64405715; 1.1055567 1.048104 0.670613; 1.2246267 0.87034446 0.82917964;;; 0.00840347 -1.2916182 -0.52266234; -0.8927084 -0.24726932 -0.45814657; -0.04297699 -0.60880625 -0.75221705;;; 0.5864988 0.4322969 0.0070129903; 0.56319207 0.0770953 0.19533734; 0.34735975 0.20152913 0.2598005;;;; -0.89531213 -1.991288 -0.90119547; -0.1784513 -0.7721306 -0.18916023; -0.8775938 -0.8828231 -0.25546074;;; -2.0066998 -1.9587256 -1.9540242; -0.941955 -1.2707055 -1.2287105; -0.7233857 -1.0122621 -0.78689843;;; 1.6253178 1.4107839 1.5320448; 2.0367432 1.8668071 1.9505911; 2.5239944 2.0231075 2.2849576;;;; -0.66736937 -0.41646266 0.18140158; -0.37302488 -0.120915316 0.3530479; -0.53939545 -0.042539727 0.13244775;;; -0.87201303 -0.81783426 -0.14501327; -0.91739565 -1.2812762 -0.04524824; -1.5164549 -1.0456936 0.1279453;;; -0.24478842 0.36559737 0.56679714; -0.9469862 -0.33409342 0.506591; -0.46515203 -0.11704081 0.16974343]
- 2: Array{Float32}((32,)) Float32[0.45870218, 1.6750594, 0.26466796, 0.34264174, -0.54983026, 0.39555025, -0.08493881, -0.42704007, 0.66570723, -0.2032263 … -0.028375795, -0.37466305, 0.14592756, 1.7479923, 0.4381849, -0.3785748, -0.19711815, -0.51015633, 1.7521653, 0.47264507]
- 3: Array{Float32}((32,)) Float32[0.6189856, 0.029828021, 0.60322595, -0.7778035, -1.9884398, 0.79331493, 0.4854119, 1.0648932, 0.82768005, -0.65227926 … 1.1576712, -1.3264399, 2.338, -2.672842, 0.36657676, -1.4750228, -0.9620088, 2.7165895, 2.058471, -1.5207441]
- 4: Array{Float32}((32,)) Float32[1.7110896, 1.9869008, 2.3066208, 0.3357683, 0.70436347, 0.90792996, 1.3186344, 2.7534206, -0.21001267, 0.19984382 … 1.3013526, 0.41716906, 1.9749262, -0.054377597, 2.2250168, 0.7057137, 0.34496078, 2.3395224, 3.6468847, 1.8438438]
- 5: Array{Float32}((3, 3, 32, 64)) [0.12309115 0.096513756 0.22513509; 0.102684654 0.074072935 0.055383626; 0.1419728 0.029549204 0.22451778;;; 0.09738428 0.11244798 -0.016608527; 0.18411085 -0.0413606 0.076171726; 0.13217317 0.34393683 0.12943034;;; 0.012793227 0.20953394 0.19164681; 0.15106378 0.25222912 0.0113491565; 0.042089425 0.18754488 0.17233725;;; … ;;; 0.16311681 0.082002535 0.0365135; 0.017853715 0.049579192 0.09798461; 0.21880496 0.0176832 -0.07284778;;; 0.06641385 -0.13200743 -0.0928225; 0.012751172 -0.114320695 -0.1746536; 0.0315527 -0.043128647 0.05889693;;; 0.31474337 0.28808197 0.24967818; 0.3797815 0.3612135 0.32099423; 0.18741438 0.30687603 0.39741877;;;; 0.15091914 0.23177159 0.16057728; 0.2749026 0.3744271 0.24830686; 0.13791783 0.25995272 0.21711819;;; 0.14482492 0.11632209 0.21073857; 0.04696855 0.123586655 0.2209793; 0.13188928 0.014208372 0.07180148;;; -0.030749204 -0.014549437 -0.09288043; -0.13545504 -0.07408477 0.021056473; -0.08925637 -0.0946378 -0.10390064;;; … ;;; 0.22210866 0.30208555 0.22016405; 0.45997807 0.39394608 0.18111345; 0.39299753 0.30907866 0.347076;;; 0.04480817 0.092122786 -0.13604993; -0.008185623 -0.0717955 -0.048076823; 0.09130952 -0.053567354 0.018388905;;; -0.18568736 -0.12400962 -0.104919; -0.13006066 -0.15739155 0.11396596; 0.0008254793 -0.009531839 -0.078244075;;;; 0.10871276 0.12089125 -0.024060115; 0.10367907 -0.029651182 0.013086661; 0.13498285 -0.053227983 -0.1491332;;; -0.29136866 -0.13846414 -0.043760788; -0.17255469 -0.138035 -0.1159682; -0.06765659 -0.112094 -0.16525503;;; -0.4865254 -0.32772923 -0.13330008; -0.42590725 -0.33713737 -0.3230917; -0.36711195 -0.04653347 -0.08935336;;; … ;;; 0.5899668 0.5368703 0.5256972; 0.4559471 0.31147859 0.40561706; 0.269548 0.4050247 0.25834495;;; -1.1069013 -1.4215429 -1.2190584; -1.3303885 -1.7814963 -1.4701008; -1.3221347 -1.507736 -1.5136461;;; 0.11928751 0.080059595 0.09376048; -0.02374018 -0.04088516 0.18081057; 0.07040813 -0.14814371 0.1110147;;;; … ;;;; 0.26415822 0.2650189 0.31621864; 0.23022453 0.23281778 0.11937006; 0.16332737 0.1561928 0.27536562;;; -0.05294837 -0.06612764 -0.08159666; -0.09360858 0.029901061 -0.10968407; 0.02560177 -0.013727121 -0.07322843;;; 0.16485065 0.067685254 0.16604166; 0.044716667 -0.134529 0.004497062; 0.0820052 -0.016214283 0.09519673;;; … ;;; 0.3950531 0.17958419 0.15493982; 0.30908132 0.4419503 0.4124087; 0.40274444 0.32235765 0.0830516;;; 0.1599784 0.062983766 0.21791695; 0.25491965 0.21932305 0.27069694; 0.21038881 0.17972504 0.14194594;;; 0.1515782 0.10669196 0.28413683; 0.039226666 0.25106362 0.25085387; 0.167038 0.113748886 0.22491865;;;; 0.08003582 0.05422766 0.023555432; 0.024777832 0.12133836 0.08701734; 0.09341219 -0.024967626 0.032816842;;; 0.039026495 0.11083005 0.0037922477; 0.01260674 -0.065693475 -0.08848513; 0.023691244 -0.027520504 -0.29384202;;; 0.05540608 -0.01739945 -0.14498779; 0.03356454 -0.04964022 -0.09045817; -0.16015534 0.029092075 -0.11088025;;; … ;;; 0.11590049 0.2853804 0.15860167; 0.23457752 0.23067166 0.030949207; -0.016515754 0.09254872 -0.052669477;;; -0.03788733 0.009690789 -0.08045262; 0.28263438 0.07460654 -0.11884126; -0.038262993 -0.018022506 0.029937595;;; -0.2030993 0.049562406 -0.027646875; -0.23358913 -0.22652747 -0.22187945; -0.105145045 -0.27482387 0.010262072;;;; -0.24450046 -0.27822027 -0.37343848; -0.35807842 -0.35130763 -0.5030494; -0.567563 -0.43859652 -0.6052176;;; 0.29692018 0.26909828 0.2483256; 0.23884127 0.28830713 0.29682332; 0.27306715 0.20698616 0.27244547;;; 0.31171775 0.3161396 0.7493256; 
0.33578008 0.49293095 0.80023074; 0.39339986 0.45134035 0.7438353;;; … ;;; -0.11635591 -0.14630328 -0.051253602; -0.23000103 -0.31247184 -0.27829295; -0.2384953 -0.23640914 -0.33179998;;; -0.06279065 -0.035671964 -0.06825111; 0.010673962 -0.1064326 -0.16751035; 0.14720385 -0.073051326 -0.09534922;;; 0.40973714 0.4715226 0.62366295; 0.42931515 0.50844806 0.4517463; 0.32270262 0.35767114 0.5660996]
- ...
- 90: Array{Float32}((1024,)) Float32[-0.29546136, 0.12316175, -0.4333991, 0.04932005, -1.2182881, -0.88146466, -0.84309345, -1.6838812, 2.5601938, 1.9808251 … 0.19737825, 1.0077192, -0.86438835, -0.067379855, 0.015523051, 0.40822363, -0.06621101, 0.19198619, -0.027223261, 0.35390672]
- 91: Array{Float32}((512, 1024)) Float32[0.35254595 0.37300357 … -0.16560957 -0.12532672; -0.3787931 -0.42160413 … 0.1686524 0.20694582; … ; -0.1882783 -0.16154407 … -0.06835318 0.2215285; -0.0010631765 0.07925456 … -0.04693754 -0.049447425]
- 92: Array{Float32}((512,)) Float32[-0.00042381042, 0.029745022, -0.00067011104, -0.00583051, 0.002128529, 0.012743832, 0.0022065798, -0.0006338556, -0.0025254688, -0.001961066 … -0.0021215933, -0.0016222714, 0.0139183765, 0.0004390899, -0.022228813, 0.040085506, 0.007011194, 0.027901476, -0.00077107997, 0.00029254716]
- 93: Array{Float32}((512,)) Float32[-0.00058319455, 0.13426286, -0.5411829, -0.26358172, -0.25671864, -0.39294815, 0.40810674, -0.0010609375, 0.02162393, -0.36817032 … 0.00089147367, 0.20706993, -0.26106647, -0.19039036, -0.25707412, -0.025029691, 0.00011088717, -0.0027701575, -0.011379168, 0.16919172]
- 94: Array{Float32}((10, 512)) Float32[0.033576462 -0.049214333 … -0.020066774 0.015939686; 0.03776026 0.038943905 … -0.018442996 0.013342874; … ; 0.035780597 -0.027480626 … -0.021669282 0.020547876; 0.039229594 0.037363328 … -0.019827502 0.018211719]
- freeze: Bool false
- params: Base.IdSet{Any}
- dict: IdDict{Any, Nothing}
- ht: Array{Any}((256,))
- 1: Array{Float32}((256,)) Float32[0.6200117, -0.72974277, 5.113774, -2.148265, -1.577467, -1.5961236, -1.9828687, -2.3928611, -1.0618314, -2.5876248 … -2.3245878, -1.5333382, 5.439896, 0.62296313, -2.683208, -3.1357234, -1.4270524, -2.2430596, 0.06796829, -0.90764433]
- 2: Nothing nothing
- 3: #undef
- 4: #undef
- 5: #undef
- ...
- 252: Nothing nothing
- 253: Array{Float32}((128,)) Float32[0.36903876, 0.31740677, 0.93585, 0.1871628, -0.2116408, 0.04046653, 0.2173404, 0.093711, -1.1932013, -0.1260338 … -0.87436074, 0.1562037, 0.6081963, 0.11023683, 0.63319623, -0.032162294, 0.8218119, -0.7798629, 0.09221165, 1.3291281]
- 254: Nothing nothing
- 255: Array{Float32}((10, 512)) Float32[0.033576462 -0.049214333 … -0.020066774 0.015939686; 0.03776026 0.038943905 … -0.018442996 0.013342874; … ; 0.035780597 -0.027480626 … -0.021669282 0.020547876; 0.039229594 0.037363328 … -0.019827502 0.018211719]
- 256: Nothing nothing
- count: Int64 94
- ndel: Int64 0
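The params field above is a Base.IdSet over the model's 94 trainable arrays (the ordered entries 1–94 listed before it). A minimal sketch, assuming `learner` is the dumped object and Flux's implicit-parameter API, of collecting and inspecting the same set:

    using Flux

    # Hedged sketch: Flux.params gathers the same parameter collection that the
    # Learner stores; the count should match `count: 94` in the dump.
    ps = Flux.params(learner.model)
    length(ps)               # expected: 94 parameter arrays
    size(last(collect(ps)))  # e.g. (10, 512) — the final Dense layer's weight above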
- step: FluxTraining.PropDict{Any}
- d: Dict{Symbol, Any}
- slots: Array{UInt8}((16,)) UInt8[0x00, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x01, 0x00, 0x00, 0x00]
- keys: Array{Symbol}((16,))
- 1: #undef
- 2: Symbol loss
- 3: Symbol ys
- 4: #undef
- 5: #undef
- ...
- 12: #undef
- 13: Symbol ŷs
- 14: #undef
- 15: #undef
- 16: #undef
- vals: Array{Any}((16,))
- 1: #undef
- 2: Float32 1.8559896f0
- 3: OneHotArrays.OneHotMatrix{UInt32, Vector{UInt32}}
- indices: Array{UInt32}((23,)) UInt32[0x0000000a, 0x0000000a, 0x0000000a, 0x0000000a, 0x0000000a, 0x0000000a, 0x0000000a, 0x0000000a, 0x0000000a, 0x0000000a … 0x0000000a, 0x0000000a, 0x0000000a, 0x0000000a, 0x0000000a, 0x0000000a, 0x0000000a, 0x0000000a, 0x0000000a, 0x0000000a]
- nlabels: Int64 10
- 4: #undef
- 5: #undef
- ...
- 12: #undef
- 13: Array{Float32}((10, 23)) Float32[-0.3279138 -1.3054168 … -1.554836 -0.6417408; 0.39954266 -2.4832747 … -1.8207791 -0.11632016; … ; 0.37614053 1.5208721 … 0.9270007 -0.86113995; 0.6324876 3.55293 … 2.4468198 -0.7564703]
- 14: #undef
- 15: #undef
- 16: #undef
- ndel: Int64 0
- count: Int64 4
- age: UInt64 0x0000000000000006
- idxfloor: Int64 1
- maxprobe: Int64 0
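The step PropDict above holds the most recent step's state: :ys (a 10-class OneHotMatrix for a batch of 23), :ŷs (a 10×23 Float32 logit matrix) and the resulting :loss of about 1.856. A minimal sketch of reading it back; the loss function is an assumption, since the dump does not show it:

    using Flux

    # FluxTraining's PropDict supports property access on the per-step state.
    ys = learner.step.ys      # 10×23 OneHotMatrix of targets
    ŷs = learner.step.ŷs      # 10×23 Float32 logits
    learner.step.loss         # 1.8559896f0 in this dump

    # Hedged assumption: if the learner's lossfn is logit cross-entropy,
    # the stored loss can be reproduced directly from the step state.
    Flux.Losses.logitcrossentropy(ŷs, ys)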
- callbacks: FluxTraining.Callbacks
- cbs: Array{FluxTraining.SafeCallback}((6,))
- 1: ToDevice
- movedatafn: gpu (function of type typeof(gpu))
- movemodelfn: gpu (function of type typeof(gpu))
- 2: ProgressPrinter
- p: ProgressMeter.Progress
- n: Int64 84
- reentrantlocker: ReentrantLock
- locked_by: Nothing nothing
- reentrancy_cnt: UInt32 0x00000000
- havelock: UInt8 0x00
- cond_wait: Base.GenericCondition{Base.Threads.SpinLock}
- waitq: Base.InvasiveLinkedList{Task}
- head: Nothing nothing
- tail: Nothing nothing
- lock: Base.Threads.SpinLock
- owned: Int64 0
- _: Tuple{Int64, Int64, Int64}
- 1: Int64 4
- 2: Int64 512
- 3: Int64 11
- dt: Float64 0.1
- counter: Int64 84
- tinit: Float64 1.701038614115002e9
- tsecond: Float64 1.701038618897073e9
- tlast: Float64 1.7010387932484808e9
- printed: Bool true
- desc: String "Epoch 2 ValidationPhase(): "
- barlen: Nothing nothing
- barglyphs: ProgressMeter.BarGlyphs
- leftend: Char '|'
- fill: Char '█'
- front: Array{Char}((7,))
- 1: Char '▏'
- 2: Char '▎'
- 3: Char '▍'
- 4: Char '▌'
- 5: Char '▋'
- 6: Char '▊'
- 7: Char '▉'
- empty: Char ' '
- rightend: Char '|'
- color: Symbol green
- output: Base.TTY
- handle: Ptr{Nothing} @0x0000000000ae0f10
- status: Int64 3
- buffer: IOBuffer
- data: Array{UInt8}((0,)) UInt8[]
- readable: Bool true
- writable: Bool true
- seekable: Bool false
- append: Bool true
- size: Int64 0
- maxsize: Int64 9223372036854775807
- ptr: Int64 1
- mark: Int64 -1
- cond: Base.GenericCondition{Base.Threads.SpinLock}
- waitq: Base.InvasiveLinkedList{Task}
- head: Nothing nothing
- tail: Nothing nothing
- lock: Base.Threads.SpinLock
- owned: Int64 0
- readerror: Nothing nothing
- sendbuf: Nothing nothing
- lock: ReentrantLock
- locked_by: Nothing nothing
- reentrancy_cnt: UInt32 0x00000000
- havelock: UInt8 0x00
- cond_wait: Base.GenericCondition{Base.Threads.SpinLock}
- waitq: Base.InvasiveLinkedList{Task}
- lock: Base.Threads.SpinLock
- _: Tuple{Int64, Int64, Int64}
- 1: Int64 0
- 2: Int64 0
- 3: Int64 0
- throttle: Int64 10485760
- offset: Int64 0
- numprintedvalues: Int64 0
- start: Int64 0
- enabled: Bool true
- showspeed: Bool false
- check_iterations: Int64 1
- prev_update_count: Int64 83
- threads_used: Array{Int64}((0,)) Int64[]
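The ProgressPrinter above wraps a ProgressMeter.Progress configured for 84 steps, a 0.1 s update interval and the description "Epoch 2 ValidationPhase(): ". A minimal standalone sketch of the same bar (the loop body is an illustrative placeholder):

    using ProgressMeter

    # Hedged sketch: n = 84 and dt = 0.1 match the fields in the dump.
    p = Progress(84; dt = 0.1, desc = "Epoch 2 ValidationPhase(): ", color = :green)
    for step in 1:84
        # ... one validation step would run here ...
        next!(p)
    end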
- 3: MetricsPrinter MetricsPrinter()
- 4: StopOnNaNLoss StopOnNaNLoss()
- 5: Recorder Recorder()
- 6: Metrics
- metrics: Tuple{Loss}
- 1: Loss
- statistic: OnlineStatsBase.Mean{Float64, OnlineStatsBase.EqualWeight}
- μ: Float64 3.7050710320472713
- weight: OnlineStatsBase.EqualWeight EqualWeight
- n: Int64 84
- _statistic: OnlineStatsBase.Mean{Float64, OnlineStatsBase.EqualWeight}
- μ: Float64 0.0
- weight: OnlineStatsBase.EqualWeight EqualWeight
- n: Int64 0
- last: Float32 1.8559896f0
- name: String "Loss"
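The Metrics callback above tracks a single Loss metric backed by an OnlineStatsBase.Mean with equal weighting, so the value it reports is simply the running average of the per-step losses (here μ ≈ 3.705 over n = 84 steps, with the last step's loss 1.856 kept separately). A minimal sketch of that accumulation, with illustrative step losses:

    using OnlineStatsBase

    # Hedged sketch: each step's loss is fed into an EqualWeight Mean, so the
    # epoch-level value is just the running mean over the steps seen so far.
    m = Mean()
    for steploss in (2.3, 1.95, 1.8559896)   # illustrative values, not from the dump
        fit!(m, steploss)
    end
    value(m), nobs(m)   # (running mean, number of observations)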
- runner: FluxTraining.LinearRunner FluxTraining.LinearRunner()
- graph: Graphs.SimpleGraphs.SimpleDiGraph{Int64}
- ne: Int64 8
- fadjlist: Array{Vector{Int64}}((6,))
- 1: Array{Int64}((3,)) [4, 5, 6]
- 2: Array{Int64}((0,)) Int64[]
- 3: Array{Int64}((0,)) Int64[]
- 4: Array{Int64}((0,)) Int64[]
- 5: Array{Int64}((3,)) [2, 3, 6]
- 6: Array{Int64}((2,)) [2, 3]
- badjlist: Array{Vector{Int64}}((6,))
- 1: Array{Int64}((0,)) Int64[]
- 2: Array{Int64}((2,)) [5, 6]
- 3: Array{Int64}((2,)) [5, 6]
- 4: Array{Int64}((1,)) [1]
- 5: Array{Int64}((1,)) [1]
- 6: Array{Int64}((2,)) [1, 5]
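The graph above is a Graphs.jl SimpleDiGraph with 6 vertices (one per callback) and 8 edges; its forward adjacency lists encode the edges 1→4, 1→5, 1→6, 5→2, 5→3, 5→6, 6→2 and 6→3, and the LinearRunner executes callbacks in an order compatible with this DAG. A minimal sketch of rebuilding and topologically sorting the same graph:

    using Graphs

    # Reconstruct the callback dependency DAG from the adjacency lists above.
    g = SimpleDiGraph(6)
    for (u, v) in ((1, 4), (1, 5), (1, 6), (5, 2), (5, 3), (5, 6), (6, 2), (6, 3))
        add_edge!(g, u, v)
    end
    ne(g)                        # 8, matching `ne` in the dump
    topological_sort_by_dfs(g)   # one execution order compatible with the DAG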
- initialized: Bool false
- cbstate: FluxTraining.PropDict{Any}
- d: Dict{Symbol, Any}
- slots: Array{UInt8}((16,)) UInt8[0x01, 0x01, 0x01, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
- keys: Array{Symbol}((16,))
- 1: Symbol metricsstep
- 2: Symbol hyperparams
- 3: Symbol history
- 4: #undef
- 5: Symbol metricsepoch
- ...
- 12: #undef
- 13: #undef
- 14: #undef
- 15: #undef
- 16: #undef
- vals: Array{Any}((16,))
- 1: DataStructures.DefaultDict{FluxTraining.Phases.Phase, ValueHistories.MVHistory, FluxTraining.var"#41#43"}
- d: DataStructures.DefaultDictBase{FluxTraining.Phases.Phase, ValueHistories.MVHistory, FluxTraining.var"#41#43", Dict{FluxTraining.Phases.Phase, ValueHistories.MVHistory}}
- default: #41 (function of type FluxTraining.var"#41#43")
- d: Dict{FluxTraining.Phases.Phase, ValueHistories.MVHistory}
- slots: Array{UInt8}((16,)) UInt8[0x01, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
- keys: Array{FluxTraining.Phases.Phase}((16,))
- 1: TrainingPhase TrainingPhase()
- 2: #undef
- 3: #undef
- 4: #undef
- 5: #undef
- ...
- 12: #undef
- 13: #undef
- 14: #undef
- 15: #undef
- 16: #undef
- vals: Array{ValueHistories.MVHistory}((16,))
- 1: ValueHistories.MVHistory{ValueHistories.History}
- 2: #undef
- 3: #undef
- 4: #undef
- 5: #undef
- ...
- 12: #undef
- 13: #undef
- 14: #undef
- 15: #undef
- 16: #undef
- ndel: Int64 0
- count: Int64 2
- age: UInt64 0x0000000000000002
- idxfloor: Int64 1
- maxprobe: Int64 0
- passkey: Bool false
- 2: ValueHistories.MVHistory{ValueHistories.History}
- storage: Dict{Symbol, ValueHistories.History}
- slots: Array{UInt8}((16,)) UInt8[0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01]
- keys: Array{Symbol}((16,))
- 1: #undef
- 2: #undef
- 3: #undef
- 4: #undef
- 5: #undef
- ...
- 12: #undef
- 13: #undef
- 14: #undef
- 15: #undef
- 16: Symbol LearningRate
- vals: Array{ValueHistories.History}((16,))
- 1: #undef
- 2: #undef
- 3: #undef
- 4: #undef
- 5: #undef
- ...
- 12: #undef
- 13: #undef
- 14: #undef
- 15: #undef
- 16: ValueHistories.History{Int64, Float64}
- lastiter: Int64 1339
- iterations: Array{Int64}((1340,)) [0, 1, 2, 3, 4, 5, 6, 7, 8, 9 … 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1339]
- values: Array{Float64}((1340,)) [0.004, 0.004450136999401451, 0.004900264102019719, 0.005350371411289214, 0.005800449031079526, 0.006250487065913003, 0.006700475621182311, 0.007150404803367985, 0.007600264720255944, 0.008050045481154986 … 0.0015639021542229693, 0.001407622820390742, 0.0012513400503049684, 0.0010950542257505004, 0.0009387657285194746, 0.0007824749404108225, 0.0006261822432289389, 0.000469888018782794, 0.00031359264888517743, 0.0001572965153518101]
- ndel: Int64 0
- count: Int64 1
- age: UInt64 0x0000000000000001
- idxfloor: Int64 1
- maxprobe: Int64 0
- 3: DataStructures.DefaultDict{FluxTraining.Phases.Phase, FluxTraining.History, FluxTraining.var"#50#51"}
- d: DataStructures.DefaultDictBase{FluxTraining.Phases.Phase, FluxTraining.History, FluxTraining.var"#50#51", Dict{FluxTraining.Phases.Phase, FluxTraining.History}}
- default: #50 (function of type FluxTraining.var"#50#51")
- d: Dict{FluxTraining.Phases.Phase, FluxTraining.History}
- slots: Array{UInt8}((16,)) UInt8[0x01, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
- keys: Array{FluxTraining.Phases.Phase}((16,))
- 1: TrainingPhase TrainingPhase()
- 2: #undef
- 3: #undef
- 4: #undef
- 5: #undef
- ...
- 12: #undef
- 13: #undef
- 14: #undef
- 15: #undef
- 16: #undef
- vals: Array{FluxTraining.History}((16,))
- 1: FluxTraining.History
- 2: #undef
- 3: #undef
- 4: #undef
- 5: #undef
- ...
- 12: #undef
- 13: #undef
- 14: #undef
- 15: #undef
- 16: #undef
- ndel: Int64 0
- count: Int64 2
- age: UInt64 0x0000000000000002
- idxfloor: Int64 1
- maxprobe: Int64 0
- passkey: Bool false
- 4: #undef
- 5: DataStructures.DefaultDict{FluxTraining.Phases.Phase, ValueHistories.MVHistory, FluxTraining.var"#42#44"}
- d: DataStructures.DefaultDictBase{FluxTraining.Phases.Phase, ValueHistories.MVHistory, FluxTraining.var"#42#44", Dict{FluxTraining.Phases.Phase, ValueHistories.MVHistory}}
- default: #42 (function of type FluxTraining.var"#42#44")
- d: Dict{FluxTraining.Phases.Phase, ValueHistories.MVHistory}
- slots: Array{UInt8}((16,)) UInt8[0x01, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
- keys: Array{FluxTraining.Phases.Phase}((16,))
- 1: TrainingPhase TrainingPhase()
- 2: #undef
- 3: #undef
- 4: #undef
- 5: #undef
- ...
- 12: #undef
- 13: #undef
- 14: #undef
- 15: #undef
- 16: #undef
- vals: Array{ValueHistories.MVHistory}((16,))
- 1: ValueHistories.MVHistory{ValueHistories.History}
- 2: #undef
- 3: #undef
- 4: #undef
- 5: #undef
- ...
- 12: #undef
- 13: #undef
- 14: #undef
- 15: #undef
- 16: #undef
- ndel: Int64 0
- count: Int64 2
- age: UInt64 0x0000000000000002
- idxfloor: Int64 1
- maxprobe: Int64 0
- passkey: Bool false
- ...
- 12: #undef
- 13: #undef
- 14: #undef
- 15: #undef
- 16: #undef
- ndel: Int64 0
- count: Int64 4
- age: UInt64 0x0000000000000004
- idxfloor: Int64 1
- maxprobe: Int64 0
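The cbstate section above also records hyperparameter history: under the :LearningRate key sits a ValueHistories.History with 1340 recorded (iteration, value) pairs, starting at 0.004, rising over the first iterations shown, and ending near 1.6e-4. A minimal sketch of reading that schedule back out, assuming property access on cbstate and the standard ValueHistories API:

    using ValueHistories
    using FluxTraining: TrainingPhase

    # Hedged sketch: pull the recorded learning-rate curve out of the dumped state.
    hist = learner.cbstate.hyperparams[TrainingPhase()]   # an MVHistory, as above
    iters, lrs = get(hist, :LearningRate)                 # 1340 (iteration, value) pairs
    first(lrs), last(lrs)                                 # 0.004 and ≈ 1.57e-4 here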