fix(tensor/BatchAccuracyForLogits): re-enable gradient computation after the test-accuracy calculation
This commit is contained in:
parent
b2958d438f
commit
69380e1456
|
@ -163,7 +163,7 @@ func main() {
|
|||
testAcc := ts.BatchAccuracyForLogits(net, ds.TestImages, ds.TestLabels, vs.Device(), 512)
|
||||
si = gotch.GetSysInfo()
|
||||
memUsed := (float64(si.TotalRam-si.FreeRam) - float64(startRAM)) / 1024
|
||||
fmt.Printf("Epoch:\t %v\t Memory Used:\t [%8.2f MiB]\tLoss: \t %.3f \tAcc: %10.3f\n", epoch, memUsed, lossVal, testAcc*100.0)
|
||||
fmt.Printf("Epoch:\t %v\t Memory Used:\t [%8.2f MiB]\tLoss: \t %.3f \tAcc: %10.2f%%\n", epoch, memUsed, lossVal, testAcc*100.0)
|
||||
iter.Drop()
|
||||
|
||||
// Print out GPU used
|
||||
|
|
|
@ -86,6 +86,8 @@ func BatchAccuracyForLogits(m ModuleT, xs, ys Tensor, d gotch.Device, batchSize
|
|||
acc.MustDrop()
|
||||
}
|
||||
|
||||
MustGradSetEnabled(true)
|
||||
|
||||
return sumAccuracy / sampleCount
|
||||
}
|
||||
|
||||
|
|
Loading…
Reference in New Issue
Block a user