use irange for loops 8 (#66743)

Summary:
Pull Request resolved: https://github.com/pytorch/pytorch/pull/66743

Modified loops in files under fbsource/fbcode/caffe2/ from the format

`for(TYPE var=x0; var<x_max; var++)`

to the format

`for(const auto var : irange(x_max))`

This was achieved by running r-barnes's loop-upgrader script (D28874212), modified to exclude all files under /torch/jit; a number of hand-written reversions and unused-variable warning suppressions were then added on top.

Test Plan: Sandcastle

Reviewed By: malfet

Differential Revision: D31705359

fbshipit-source-id: c9ea2fbc0f9cd29e97a52dcb203addc5f2abb09b
This commit is contained in:
Richard Barnes
2021-12-02 10:19:43 -08:00
committed by Facebook GitHub Bot
parent ff3fc37267
commit 17e5200441
64 changed files with 353 additions and 327 deletions

View File

@ -1,5 +1,6 @@
#include <gtest/gtest.h>
#include <c10/util/irange.h>
#include <torch/torch.h>
#include <test/cpp/api/support.h>
@ -1127,7 +1128,7 @@ TEST_F(FunctionalTest, GumbelSoftmax) {
int dims[] = {1, -1};
// NOLINTNEXTLINE(cppcoreguidelines-avoid-c-arrays,modernize-avoid-c-arrays,cppcoreguidelines-avoid-magic-numbers)
int expected[] = {5*3, 5*4};
for(auto i=0; i<2; i++) {
for (const auto i : c10::irange(2)) {
auto logits = torch::randn({5, 4, 3});
int expected_count = expected[i];
auto y_draw = F::gumbel_softmax(logits, F::GumbelSoftmaxFuncOptions().hard(true).dim(dims[i]));
@ -1149,7 +1150,8 @@ TEST_F(FunctionalTest, GumbelSoftmax) {
auto counts = torch::zeros_like(logits);
torch::Tensor y_draw;
for (auto i=0; i<num_draws; i++) {
for (const auto i : c10::irange(num_draws)) {
(void)i; // Suppress unused variable warning
y_draw = F::gumbel_softmax(logits, F::GumbelSoftmaxFuncOptions().hard(true));
counts += y_draw;
}
@ -1175,7 +1177,7 @@ TEST_F(FunctionalTest, Softmax) {
auto output = F::softmax(input, /*dim=*/1);
auto sum = torch::sum(torch::exp(input), 1);
for (int i = 0; i < 2; i++) {
for (const auto i : c10::irange(2)) {
auto expected = torch::exp(input[i]) / sum[i];
ASSERT_TRUE(torch::allclose(output[i], expected));
}
@ -1187,7 +1189,7 @@ TEST_F(FunctionalTest, Softmin) {
auto output = F::softmin(input, /*dim=*/1);
auto sum = torch::sum(torch::exp(-input), 1);
for (int i = 0; i < 2; i++) {
for (const auto i : c10::irange(2)) {
auto expected = torch::exp(-input[i]) / sum[i];
ASSERT_TRUE(torch::allclose(output[i], expected));
}
@ -1199,7 +1201,7 @@ TEST_F(FunctionalTest, LogSoftmax) {
auto output = F::log_softmax(input, /*dim=*/1);
auto sum = torch::sum(torch::exp(input), 1);
for (int i = 0; i < 2; i++) {
for (const auto i : c10::irange(2)) {
auto expected = torch::log(torch::exp(input[i]) / sum[i]);
ASSERT_TRUE(torch::allclose(output[i], expected));
}