Commit 425d402

Fix more broadcast deprecations
Parent: dcbab46

5 files changed: +9 −9 lines
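
All five diffs apply the same fix: Julia's dot-call syntax `f.(x)` makes elementwise application explicit, and the older implicitly vectorized methods such as `round(::Array)` were deprecated in its favor, together with dotted operators like `.%` and `.+`. A standalone before/after sketch of the patterns touched by this commit (not code from the repository):

    x = rand(5)

    # round(x)    # deprecated: implicit vectorization over the array
    round.(x)     # replacement: explicit elementwise broadcast
    log1p.(-x)    # same pattern for log1p; negating the array needs no dot
    x .+ 0.01     # dotted operator: adds the scalar to each element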

src/layers/binary-cross-entropy-loss.jl (1 addition, 1 deletion)

@@ -29,7 +29,7 @@ end
 function forward(backend::CPUBackend, state::BinaryCrossEntropyLossLayerState, inputs::Vector{Blob})
   pred = vec(inputs[1].data)
   label = vec(inputs[2].data)
-  loss = BLAS.dot(log.(pred), label) + BLAS.dot(log1p(-pred), (1-label))
+  loss = BLAS.dot(log.(pred), label) + BLAS.dot(log1p.(-pred), (1-label))

   num = get_num(inputs[1])
   state.loss = state.layer.weight * -loss/num
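
For reference, `BLAS.dot(log.(pred), label)` is the sum Σᵢ labelᵢ·log(predᵢ), and `log1p.(-pred)` computes `log.(1 .- pred)` with better accuracy when entries of `pred` are near zero. The same (unweighted, unnormalized) quantity as a single broadcast expression, a sketch rather than the library's code, assuming `pred` and `label` are the vectors above with `pred` in (0, 1):

    # Binary cross-entropy sum: Σᵢ labelᵢ·log(predᵢ) + (1 − labelᵢ)·log(1 − predᵢ)
    loss = sum(label .* log.(pred) .+ (1 .- label) .* log1p.(-pred))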

test/layers/accuracy.jl (1 addition, 1 deletion)

@@ -12,7 +12,7 @@ function test_accuracy_layer(backend::Backend, tensor_dim, T)
   input = rand(T, dims)
   input_blob = make_blob(backend, input)

-  label = abs(rand(Int, dims_label)) % dims[op_dim]
+  label = abs.(rand(Int, dims_label)) .% dims[op_dim]
   label = convert(Array{T}, label)
   label_blob = make_blob(backend, label)
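
The replacement broadcasts both the function and the operator: `abs.` takes elementwise absolute values and `.%` maps each through the modulus, so every generated label lands in `0:dims[op_dim]-1`. A standalone sketch of the pattern (hypothetical sizes):

    n_classes = 4
    label = abs.(rand(Int, 6)) .% n_classes   # six integer labels, each in 0:3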

test/layers/binary-accuracy.jl (2 additions, 2 deletions)

@@ -8,9 +8,9 @@ function test_binary_accuracy_layer(backend::Backend, tensor_dim, T, threshold,
   println(" > $dims")
   if 0 == threshold
     preds = rand(T, dims)*4-2
-    labels = round(rand(T, dims))*2-1
+    labels = round.(rand(T, dims))*2-1
   elseif 0.5 == threshold
-    preds = round(rand(T, dims))
+    preds = round.(rand(T, dims))
     labels = rand(T, dims)
   else
     error("Threshold must be 0 or 0.5; was $threshold")

test/layers/hinge-loss.jl (2 additions, 2 deletions)

@@ -8,7 +8,7 @@ function test_hinge_loss_layer(backend::Backend, T, eps)
   dims = tuple(rand(6:11, tensor_dim)...)
   println(" > $dims")
   preds = rand(T, dims)*4-2
-  labels = round(rand(T, dims))*2-1
+  labels = round.(rand(T, dims))*2-1
   errs_mask = preds.*labels .< one(T)

   ############################################################
@@ -29,7 +29,7 @@ function test_hinge_loss_layer(backend::Backend, T, eps)

   forward(backend, state, inputs)

-  loss = sum(max(one(T) .- preds.*labels, zero(T))) / dims[end]
+  loss = sum(max.(one(T) .- preds.*labels, zero(T))) / dims[end]
   @test -eps < loss-state.loss < eps

   backward(backend, state, inputs, diffs)
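
A distinction worth keeping straight when dotting this call: `max.(a, b)` is the elementwise two-argument maximum (what the deprecated vectorized `max` did), while `maximum(a)` is the reduction that returns a single value. A standalone sketch:

    a = [0.2, 1.5, -0.3]
    max.(a, 0.0)   # elementwise against the scalar: [0.2, 1.5, 0.0]
    maximum(a)     # reduction over the array: 1.5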

test/layers/multinomial-logistic-loss.jl (3 additions, 3 deletions)

@@ -13,9 +13,9 @@ function test_multinomial_logistic_loss_layer(backend::Backend, tensor_dim, clas
   dims = tuple(dims...)
   channels = dims[op_dim]

-  prob = abs(rand(T, dims)) + 0.01
+  prob = abs.(rand(T, dims)) .+ 0.01

-  label = abs(rand(Int, dims_label)) % channels
+  label = abs.(rand(Int, dims_label)) .% channels
   label = convert(Array{T}, label)

   prob_blob = make_blob(backend, prob)
@@ -27,7 +27,7 @@ function test_multinomial_logistic_loss_layer(backend::Backend, tensor_dim, clas
   elseif class_weights[1] == :local
     weights = rand(T, channels)
   elseif class_weights[1] == :global
-    weights = round(1000*rand(T, dims[1:end-1]))/1000
+    weights = round.(1000*rand(T, dims[1:end-1]))/1000
   else
     @assert class_weights[1] == :no
     weights = []
