The following issues were found:

torch/distributed/optim/functional_sgd.py
9 issues
TODO: Once step_param interface is robust, refactor step to call
Error

Line: 48 Column: 3

                      """ Similar to self.step, but operates on a single parameter and
            its gradient.
        """
        # TODO: Once step_param interface is robust, refactor step to call
        # step param on each param.
        weight_decay = self.defaults['weight_decay']
        momentum = self.defaults['momentum']
        dampening = self.defaults['dampening']
        lr = self.defaults['lr']

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              from typing import List, Optional, Dict
import torch
import torch.optim._functional as F

from torch import Tensor

# Define a TorchScript compatible Functional SGD Optimizer
# where we use these optimizer in a functional way.
# Instead of using the `param.grad` when updating parameters,

            

Reported by Pylint.

Class '_FunctionalSGD' inherits from object, can be safely removed from bases in python3
Error

Line: 17 Column: 1

              # NOTE: This should be only used by distributed optimizer internals
# and not meant to expose to the user.
@torch.jit.script
class _FunctionalSGD(object):
    def __init__(
        self,
        params: List[Tensor],
        lr: float = 1e-2,
        momentum: float = 0.0,

            

Reported by Pylint.

Too many arguments (8/5)
Error

Line: 18 Column: 5

              # and not meant to expose to the user.
@torch.jit.script
class _FunctionalSGD(object):
    def __init__(
        self,
        params: List[Tensor],
        lr: float = 1e-2,
        momentum: float = 0.0,
        dampening: float = 0.0,

            

Reported by Pylint.

Variable name "lr" doesn't conform to snake_case naming style
Error

Line: 53 Column: 9

                      weight_decay = self.defaults['weight_decay']
        momentum = self.defaults['momentum']
        dampening = self.defaults['dampening']
        lr = self.defaults['lr']
        params = [param]
        momentum_buffer_list: List[Optional[Tensor]] = []
        grads = []
        if grad is not None:
            grads.append(grad)

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 84 Column: 5

                      if momentum_buffer is not None:
            state['momentum_buffer'] = momentum_buffer

    def step(self, gradients: List[Optional[Tensor]]):
        params = self.param_group['params']
        params_with_grad = []
        grads = []
        momentum_buffer_list: List[Optional[Tensor]] = []
        lr = self.defaults['lr']

            

Reported by Pylint.

Too many local variables (16/15)
Error

Line: 84 Column: 5

                      if momentum_buffer is not None:
            state['momentum_buffer'] = momentum_buffer

    def step(self, gradients: List[Optional[Tensor]]):
        params = self.param_group['params']
        params_with_grad = []
        grads = []
        momentum_buffer_list: List[Optional[Tensor]] = []
        lr = self.defaults['lr']

            

Reported by Pylint.

Variable name "lr" doesn't conform to snake_case naming style
Error

Line: 89 Column: 9

                      params_with_grad = []
        grads = []
        momentum_buffer_list: List[Optional[Tensor]] = []
        lr = self.defaults['lr']
        weight_decay = self.defaults['weight_decay']
        momentum = self.defaults['momentum']
        dampening = self.defaults['dampening']

        if len(params) != len(gradients):

            

Reported by Pylint.

Variable name "p" doesn't conform to snake_case naming style
Error

Line: 126 Column: 16

                                nesterov=self.nesterov)

        # update momentum_buffers in state
        for i, p in enumerate(params_with_grad):
            state = self.state[p]
            momentum_buffer = momentum_buffer_list[i]
            if momentum_buffer is not None:
                state['momentum_buffer'] = momentum_buffer

            

Reported by Pylint.

tools/render_junit.py
9 issues
Consider explicitly re-raising using the 'from' keyword
Error

Line: 10 Column: 5

              try:
    from junitparser import JUnitXml, TestSuite, TestCase, Error, Failure  # type: ignore[import]
except ImportError:
    raise ImportError(
        "junitparser not found, please install with 'pip install junitparser'"
    )

try:
    import rich

            

Reported by Pylint.

Catching too general exception Exception
Error

Line: 23 Column: 16

                  def parse_file(path: str) -> List[TestCase]:  # type: ignore[no-any-unimported]
        try:
            return convert_junit_to_testcases(JUnitXml.fromfile(path))
        except Exception as err:
            rich.print(f":Warning: [yellow]Warning[/yellow]: Failed to read {path}: {err}")
            return []

    if not os.path.exists(path_to_reports):
        raise FileNotFoundError(f"Path '{path_to_reports}', not found")

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              #!/usr/bin/env python3

import argparse
import os
from typing import Any, List, Union

try:
    from junitparser import JUnitXml, TestSuite, TestCase, Error, Failure  # type: ignore[import]
except ImportError:

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 19 Column: 1

              except ImportError:
    print("rich not found, for color output use 'pip install rich'")

def parse_junit_reports(path_to_reports: str) -> List[TestCase]:  # type: ignore[no-any-unimported]
    def parse_file(path: str) -> List[TestCase]:  # type: ignore[no-any-unimported]
        try:
            return convert_junit_to_testcases(JUnitXml.fromfile(path))
        except Exception as err:
            rich.print(f":Warning: [yellow]Warning[/yellow]: Failed to read {path}: {err}")

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 40 Column: 1

                  return ret_xml


def convert_junit_to_testcases(xml: Union[JUnitXml, TestSuite]) -> List[TestCase]:  # type: ignore[no-any-unimported]
    testcases = []
    for item in xml:
        if isinstance(item, TestSuite):
            testcases.extend(convert_junit_to_testcases(item))
        else:

            

Reported by Pylint.

Line too long (117/100)
Error

Line: 40 Column: 1

                  return ret_xml


def convert_junit_to_testcases(xml: Union[JUnitXml, TestSuite]) -> List[TestCase]:  # type: ignore[no-any-unimported]
    testcases = []
    for item in xml:
        if isinstance(item, TestSuite):
            testcases.extend(convert_junit_to_testcases(item))
        else:

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 49 Column: 1

                          testcases.append(item)
    return testcases

def render_tests(testcases: List[TestCase]) -> None:  # type: ignore[no-any-unimported]
    num_passed = 0
    num_skipped = 0
    num_failed = 0
    for testcase in testcases:
        if not testcase.result:

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 75 Column: 1

              


def parse_args() -> Any:
    parser = argparse.ArgumentParser(
        description="Render xunit output for failed tests",
    )
    parser.add_argument(
        "report_path",

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 86 Column: 1

                  return parser.parse_args()


def main() -> None:
    options = parse_args()
    testcases = parse_junit_reports(options.report_path)
    render_tests(testcases)



            

Reported by Pylint.

test/optim/test.py
9 issues
Unable to import 'torch'
Error

Line: 2 Column: 1

              import json
import torch
import torch.legacy.optim as optim


def rosenbrock(tensor):
    x, y = tensor
    return (1 - x) ** 2 + 100 * (y - x ** 2) ** 2


            

Reported by Pylint.

Unable to import 'torch.legacy.optim'
Error

Line: 3 Column: 1

              import json
import torch
import torch.legacy.optim as optim


def rosenbrock(tensor):
    x, y = tensor
    return (1 - x) ** 2 + 100 * (y - x ** 2) ** 2


            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              import json
import torch
import torch.legacy.optim as optim


def rosenbrock(tensor):
    x, y = tensor
    return (1 - x) ** 2 + 100 * (y - x ** 2) ** 2


            

Reported by Pylint.

Missing function or method docstring
Error

Line: 6 Column: 1

              import torch.legacy.optim as optim


def rosenbrock(tensor):
    x, y = tensor
    return (1 - x) ** 2 + 100 * (y - x ** 2) ** 2


def drosenbrock(tensor):

            

Reported by Pylint.

Variable name "y" doesn't conform to snake_case naming style
Error

Line: 7 Column: 8

              

def rosenbrock(tensor):
    x, y = tensor
    return (1 - x) ** 2 + 100 * (y - x ** 2) ** 2


def drosenbrock(tensor):
    x, y = tensor

            

Reported by Pylint.

Variable name "x" doesn't conform to snake_case naming style
Error

Line: 7 Column: 5

              

def rosenbrock(tensor):
    x, y = tensor
    return (1 - x) ** 2 + 100 * (y - x ** 2) ** 2


def drosenbrock(tensor):
    x, y = tensor

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 11 Column: 1

                  return (1 - x) ** 2 + 100 * (y - x ** 2) ** 2


def drosenbrock(tensor):
    x, y = tensor
    return torch.DoubleTensor((-400 * x * (y - x ** 2) - 2 * (1 - x), 200 * (y - x ** 2)))

algorithms = {
    'adadelta': optim.adadelta,

            

Reported by Pylint.

Variable name "x" doesn't conform to snake_case naming style
Error

Line: 12 Column: 5

              

def drosenbrock(tensor):
    x, y = tensor
    return torch.DoubleTensor((-400 * x * (y - x ** 2) - 2 * (1 - x), 200 * (y - x ** 2)))

algorithms = {
    'adadelta': optim.adadelta,
    'adagrad': optim.adagrad,

            

Reported by Pylint.

Variable name "y" doesn't conform to snake_case naming style
Error

Line: 12 Column: 8

              

def drosenbrock(tensor):
    x, y = tensor
    return torch.DoubleTensor((-400 * x * (y - x ** 2) - 2 * (1 - x), 200 * (y - x ** 2)))

algorithms = {
    'adadelta': optim.adadelta,
    'adagrad': optim.adagrad,

            

Reported by Pylint.

tools/code_coverage/package/util/setting.py
9 issues
Missing module docstring
Error

Line: 1 Column: 1

              import os
from enum import Enum
from typing import Dict, List, Set


# <project folder>
HOME_DIR = os.environ["HOME"]
TOOLS_FOLDER = os.path.join(
    os.path.dirname(os.path.realpath(__file__)), os.path.pardir, os.path.pardir

            

Reported by Pylint.

Line too long (104/100)
Error

Line: 23 Column: 1

              LOG_DIR = os.path.join(PROFILE_DIR, "log")


# test type, DO NOT change the name, it should be consistent with [buck query --output-attribute] result
class TestType(Enum):
    CPP: str = "cxx_test"
    PY: str = "python_test"



            

Reported by Pylint.

Missing class docstring
Error

Line: 24 Column: 1

              

# test type, DO NOT change the name, it should be consistent with [buck query --output-attribute] result
class TestType(Enum):
    CPP: str = "cxx_test"
    PY: str = "python_test"


class Test:

            

Reported by Pylint.

Too few public methods (0/2)
Error

Line: 29 Column: 1

                  PY: str = "python_test"


class Test:
    name: str
    target_pattern: str
    test_set: str  # like __aten__
    test_type: TestType


            

Reported by Pylint.

Missing class docstring
Error

Line: 29 Column: 1

                  PY: str = "python_test"


class Test:
    name: str
    target_pattern: str
    test_set: str  # like __aten__
    test_type: TestType


            

Reported by Pylint.

Missing class docstring
Error

Line: 49 Column: 1

              

# option
class Option:
    need_build: bool = False
    need_run: bool = False
    need_merge: bool = False
    need_export: bool = False
    need_summary: bool = False

            

Reported by Pylint.

Too few public methods (0/2)
Error

Line: 49 Column: 1

              

# option
class Option:
    need_build: bool = False
    need_run: bool = False
    need_merge: bool = False
    need_export: bool = False
    need_summary: bool = False

            

Reported by Pylint.

Missing class docstring
Error

Line: 59 Column: 1

              

# test platform
class TestPlatform(Enum):
    FBCODE: str = "fbcode"
    OSS: str = "oss"


# compiler type

            

Reported by Pylint.

Missing class docstring
Error

Line: 65 Column: 1

              

# compiler type
class CompilerType(Enum):
    CLANG: str = "clang"
    GCC: str = "gcc"

            

Reported by Pylint.

torch/distributed/pipeline/sync/_balance/__init__.py
9 issues
Unable to import '__init__.profile'
Error

Line: 28 Column: 1

              import torch.nn as nn

from . import blockpartition
from .profile import profile_sizes, profile_times

__all__ = ["balance_by_time", "balance_by_size"]


Device = Union[torch.device, int, str]

            

Reported by Pylint.

Module 'torch' has no 'device' member
Error

Line: 33 Column: 16

              __all__ = ["balance_by_time", "balance_by_size"]


Device = Union[torch.device, int, str]

Tensors = Sequence[Tensor]
TensorOrTensors = Union[Tensor, Tensors]



            

Reported by Pylint.

Module 'torch' has no 'device' member
Error

Line: 50 Column: 22

                  sample: Union[List[Any], Tensor],
    *,
    timeout: float = 1.0,
    device: Device = torch.device("cuda"),
) -> List[int]:
    """Naive automatic balancing by elapsed time per layer.
    ::

        sample = torch.empty(128, 3, 224, 224)

            

Reported by Pylint.

Module 'torch' has no 'device' member
Error

Line: 83 Column: 52

                      `module` and `sample` must be placed on the same device.

    """
    times = profile_times(module, sample, timeout, torch.device(device))
    return balance_cost(times, partitions)


def balance_by_size(
    partitions: int,

            

Reported by Pylint.

Module 'torch' has no 'device' member
Error

Line: 94 Column: 22

                  *,
    chunks: int = 1,
    param_scale: float = 2.0,
    device: Device = torch.device("cuda"),
) -> List[int]:
    """Naive automatic balancing by CUDA memory usage per layer.

    During training, required memory for parameters depends on which optimizer
    is used. Optimizers may use buffers for each parameter to track

            

Reported by Pylint.

Module 'torch' has no 'device' member
Error

Line: 163 Column: 63

                      `module` and `input` must be placed on the same CUDA device.

    """
    sizes = profile_sizes(module, input, chunks, param_scale, torch.device(device))
    return balance_cost(sizes, partitions)

            

Reported by Pylint.

Module import itself
Error

Line: 27 Column: 1

              from torch import Tensor
import torch.nn as nn

from . import blockpartition
from .profile import profile_sizes, profile_times

__all__ = ["balance_by_time", "balance_by_size"]



            

Reported by Pylint.

Redefining built-in 'input'
Error

Line: 90 Column: 5

              def balance_by_size(
    partitions: int,
    module: nn.Sequential,
    input: Union[List[Any], Tensor],
    *,
    chunks: int = 1,
    param_scale: float = 2.0,
    device: Device = torch.device("cuda"),
) -> List[int]:

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 39 Column: 1

              TensorOrTensors = Union[Tensor, Tensors]


def balance_cost(cost: List[int], partitions: int) -> List[int]:
    partitioned = blockpartition.solve(cost, partitions)
    return [len(p) for p in partitioned]


def balance_by_time(

            

Reported by Pylint.

test/test_jit_disabled.py
9 issues
Unable to import 'torch.testing._internal.common_utils'
Error

Line: 5 Column: 1

              import os
import contextlib
import subprocess
from torch.testing._internal.common_utils import TestCase, run_tests, TemporaryFileName


@contextlib.contextmanager
def _jit_disabled():
    cur_env = os.environ.get("PYTORCH_JIT", "1")

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              import sys
import os
import contextlib
import subprocess
from torch.testing._internal.common_utils import TestCase, run_tests, TemporaryFileName


@contextlib.contextmanager
def _jit_disabled():

            

Reported by Pylint.

Consider possible security implications associated with subprocess module.
Security blacklist

Line: 4
Suggestion: https://bandit.readthedocs.io/en/latest/blacklists/blacklist_imports.html#b404-import-subprocess

              import sys
import os
import contextlib
import subprocess
from torch.testing._internal.common_utils import TestCase, run_tests, TemporaryFileName


@contextlib.contextmanager
def _jit_disabled():

            

Reported by Bandit.

Variable name "f" doesn't conform to snake_case naming style
Error

Line: 33 Column: 38

                      # Write `src` out to a temporary so our source inspection logic works
        # correctly.
        with TemporaryFileName() as fname:
            with open(fname, 'w') as f:
                f.write(src)
                with _jit_disabled():
                    out_disabled = subprocess.check_output([
                        sys.executable,
                        fname])

            

Reported by Pylint.

subprocess call - check for execution of untrusted input.
Security injection

Line: 36
Suggestion: https://bandit.readthedocs.io/en/latest/plugins/b603_subprocess_without_shell_equals_true.html

                          with open(fname, 'w') as f:
                f.write(src)
                with _jit_disabled():
                    out_disabled = subprocess.check_output([
                        sys.executable,
                        fname])
                out_enabled = subprocess.check_output([
                    sys.executable,
                    fname])

            

Reported by Bandit.

subprocess call - check for execution of untrusted input.
Security injection

Line: 39
Suggestion: https://bandit.readthedocs.io/en/latest/plugins/b603_subprocess_without_shell_equals_true.html

                                  out_disabled = subprocess.check_output([
                        sys.executable,
                        fname])
                out_enabled = subprocess.check_output([
                    sys.executable,
                    fname])
                self.assertEqual(out_disabled, out_enabled)

    def test_attribute(self):

            

Reported by Bandit.

Missing function or method docstring
Error

Line: 44 Column: 5

                                  fname])
                self.assertEqual(out_disabled, out_enabled)

    def test_attribute(self):
        _program_string = """
import torch
class Foo(torch.jit.ScriptModule):
    def __init__(self, x):
        super(Foo, self).__init__()

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 60 Column: 5

              """
        self.compare_enabled_disabled(_program_string)

    def test_script_module_construction(self):
        _program_string = """
import torch

class AModule(torch.jit.ScriptModule):
    def __init__(self):

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 76 Column: 5

              """
        self.compare_enabled_disabled(_program_string)

    def test_recursive_script(self):
        _program_string = """
import torch

class AModule(torch.nn.Module):
    def __init__(self):

            

Reported by Pylint.

torch/fx/experimental/fx2trt/converters/batchnorm.py
9 issues
Unable to import 'tensorrt'
Error

Line: 3 Column: 1

              import torch
import numpy as np
import tensorrt as trt
from torch.fx.experimental.fx2trt.fx2trt import tensorrt_converter

from .helper_functions import mark_as_int8_layer, to_numpy, get_dyn_range

def common_batchnorm(network, mod, input_val, layer_name, is_quantized):
    scale = to_numpy(mod.weight) / np.sqrt(

            

Reported by Pylint.

Attempted relative import beyond top-level package
Error

Line: 6 Column: 1

              import tensorrt as trt
from torch.fx.experimental.fx2trt.fx2trt import tensorrt_converter

from .helper_functions import mark_as_int8_layer, to_numpy, get_dyn_range

def common_batchnorm(network, mod, input_val, layer_name, is_quantized):
    scale = to_numpy(mod.weight) / np.sqrt(
        to_numpy(mod.running_var) + mod.eps
    )

            

Reported by Pylint.

Module 'torch' has no 'quint8' member
Error

Line: 22 Column: 76

                  layer.name = layer_name

    if is_quantized:
        mark_as_int8_layer(layer, get_dyn_range(mod.scale, mod.zero_point, torch.quint8))

    return layer.get_output(0)

@tensorrt_converter(torch.nn.modules.batchnorm.BatchNorm2d)
def batchnorm2d(network, submod, args, kwargs, layer_name):

            

Reported by Pylint.

Unused argument 'kwargs'
Error

Line: 40 Column: 50

              

@tensorrt_converter(torch.nn.quantized.modules.batchnorm.BatchNorm2d)
def quantized_batchnorm2d(network, submod, args, kwargs, layer_name):
    input_val = args[0]

    if not isinstance(input_val, trt.tensorrt.ITensor):
        raise RuntimeError(f'Quantized BatchNorm2d received input {input_val} that is not part '
                           'of the TensorRT region!')

            

Reported by Pylint.

Missing module docstring
Error

Line: 1 Column: 1

              import torch
import numpy as np
import tensorrt as trt
from torch.fx.experimental.fx2trt.fx2trt import tensorrt_converter

from .helper_functions import mark_as_int8_layer, to_numpy, get_dyn_range

def common_batchnorm(network, mod, input_val, layer_name, is_quantized):
    scale = to_numpy(mod.weight) / np.sqrt(

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 8 Column: 1

              
from .helper_functions import mark_as_int8_layer, to_numpy, get_dyn_range

def common_batchnorm(network, mod, input_val, layer_name, is_quantized):
    scale = to_numpy(mod.weight) / np.sqrt(
        to_numpy(mod.running_var) + mod.eps
    )
    bias = (
        to_numpy(mod.bias)

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 27 Column: 1

                  return layer.get_output(0)

@tensorrt_converter(torch.nn.modules.batchnorm.BatchNorm2d)
def batchnorm2d(network, submod, args, kwargs, layer_name):
    # args/kwargs should have already been normalized to kwargs
    assert len(args) == 0
    input_val = kwargs["input"]

    if not isinstance(input_val, trt.tensorrt.ITensor):

            

Reported by Pylint.

Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.
Security

Line: 29
Suggestion: https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html

              @tensorrt_converter(torch.nn.modules.batchnorm.BatchNorm2d)
def batchnorm2d(network, submod, args, kwargs, layer_name):
    # args/kwargs should have already been normalized to kwargs
    assert len(args) == 0
    input_val = kwargs["input"]

    if not isinstance(input_val, trt.tensorrt.ITensor):
        raise RuntimeError(f"BatchNorm2d received input {input_val} that is not part "
                           "of the TensorRT region!")

            

Reported by Bandit.

Missing function or method docstring
Error

Line: 40 Column: 1

              

@tensorrt_converter(torch.nn.quantized.modules.batchnorm.BatchNorm2d)
def quantized_batchnorm2d(network, submod, args, kwargs, layer_name):
    input_val = args[0]

    if not isinstance(input_val, trt.tensorrt.ITensor):
        raise RuntimeError(f'Quantized BatchNorm2d received input {input_val} that is not part '
                           'of the TensorRT region!')

            

Reported by Pylint.

third_party/miniz-2.0.8/examples/example4.c
9 issues
Resource leak: pInfile
Error

Line: 54 CWE codes: 775

                {
     // This is not a limitation of miniz or tinfl, but this example.
     printf("File is too large to be processed by this example.\n");
     return EXIT_FAILURE;
  }

  infile_size = (uint)file_loc;

  pCmp_data = (uint8 *)malloc(infile_size);

            

Reported by Cppcheck.

Resource leak: pInfile
Error

Line: 63 CWE codes: 775

                if (!pCmp_data)
  {
    printf("Out of memory!\n");
    return EXIT_FAILURE;
  }
  if (fread(pCmp_data, 1, infile_size, pInfile) != infile_size)
  {
    printf("Failed reading input file!\n");
    return EXIT_FAILURE;

            

Reported by Cppcheck.

Memory leak: pCmp_data
Error

Line: 68 CWE codes: 401

                if (fread(pCmp_data, 1, infile_size, pInfile) != infile_size)
  {
    printf("Failed reading input file!\n");
    return EXIT_FAILURE;
  }

  // Open output file.
  pOutfile = fopen(argv[2], "wb");
  if (!pOutfile)

            

Reported by Cppcheck.

Resource leak: pInfile
Error

Line: 68 CWE codes: 775

                if (fread(pCmp_data, 1, infile_size, pInfile) != infile_size)
  {
    printf("Failed reading input file!\n");
    return EXIT_FAILURE;
  }

  // Open output file.
  pOutfile = fopen(argv[2], "wb");
  if (!pOutfile)

            

Reported by Cppcheck.

Memory leak: pCmp_data
Error

Line: 76 CWE codes: 401

                if (!pOutfile)
  {
    printf("Failed opening output file!\n");
    return EXIT_FAILURE;
  }

  printf("Input file size: %u\n", infile_size);

  in_buf_size = infile_size;

            

Reported by Cppcheck.

Resource leak: pInfile
Error

Line: 76 CWE codes: 775

                if (!pOutfile)
  {
    printf("Failed opening output file!\n");
    return EXIT_FAILURE;
  }

  printf("Input file size: %u\n", infile_size);

  in_buf_size = infile_size;

            

Reported by Cppcheck.

Resource leak: pInfile
Error

Line: 86 CWE codes: 775

                if (!status)
  {
    printf("tinfl_decompress_mem_to_callback() failed with status %i!\n", status);
    return EXIT_FAILURE;
  }

  outfile_size = ftell(pOutfile);

  fclose(pInfile);

            

Reported by Cppcheck.

fopen - Check when opening files - can an attacker redirect it (via symlinks), force the opening of special file type (e.g., device files), move things around to create a race condition, control its ancestors, or change its contents?
Security

Line: 38 Column: 13 CWE codes: 362

                }

  // Open input file.
  pInfile = fopen(argv[1], "rb");
  if (!pInfile)
  {
    printf("Failed opening input file!\n");
    return EXIT_FAILURE;
  }

            

Reported by FlawFinder.

fopen - Check when opening files - can an attacker redirect it (via symlinks), force the opening of special file type (e.g., device files), move things around to create a race condition, control its ancestors, or change its contents?
Security

Line: 72 Column: 14 CWE codes: 362

                }

  // Open output file.
  pOutfile = fopen(argv[2], "wb");
  if (!pOutfile)
  {
    printf("Failed opening output file!\n");
    return EXIT_FAILURE;
  }

            

Reported by FlawFinder.

torch/csrc/deploy/loader.cpp
9 issues
snprintf - If format strings can be influenced by an attacker, they can be exploited, and note that sprintf variations do not always \0-terminate
Security

Line: 129 Column: 16 CWE codes: 134
Suggestion: Use a constant for the format specification

              
template <typename... Args>
std::string stringf(const char* format, Args... args) {
  int size_s = snprintf(nullptr, 0, format, args...);
  std::string result(size_s + 1, 0);
  snprintf((char*)result.data(), size_s + 1, format, args...);
  return result;
}
// Returns the address of the page containing address 'x'.

            

Reported by FlawFinder.

snprintf - If format strings can be influenced by an attacker, they can be exploited, and note that sprintf variations do not always \0-terminate
Security

Line: 131 Column: 3 CWE codes: 134
Suggestion: Use a constant for the format specification

              std::string stringf(const char* format, Args... args) {
  int size_s = snprintf(nullptr, 0, format, args...);
  std::string result(size_s + 1, 0);
  snprintf((char*)result.data(), size_s + 1, format, args...);
  return result;
}
// Returns the address of the page containing address 'x'.
#define PAGE_START(x) ((x)&PAGE_MASK)


            

Reported by FlawFinder.

access - This usually indicates a security flaw. If an attacker can change anything along the path between the call to access() and the file's actual use (e.g., by moving files), the attacker can exploit the race condition
Security

Line: 774 Column: 13 CWE codes: 362/367!
Suggestion: Set up the correct permissions (e.g., using setuid()) and try to open the file directly

                    for (size_t i = search_path.size(); i > 0; --i) {
        std::stringstream ss;
        ss << search_path[i - 1] << "/" << name;
        if (access(ss.str().c_str(), F_OK) == 0) {
          library_path = ss.str();
          break;
        }
      }
    }

            

Reported by FlawFinder.

realpath - This function does not protect against buffer overflows, and some implementations can overflow internally
Security

Line: 112 Column: 20 CWE codes: 120/785!
Suggestion: Ensure that the destination buffer is at least of size MAXPATHLEN, and to protect against implementation problems, the input argument should also be checked to ensure it is no larger than MAXPATHLEN

                replace_all(result, "$ORIGIN", origin);
  // NOLINTNEXTLINE
  char buf[PATH_MAX];
  char* resolved = realpath(result.c_str(), buf);
  if (!resolved) {
    return result;
  }
  return resolved;
}

            

Reported by FlawFinder.

realpath - This function does not protect against buffer overflows, and some implementations can overflow internally
Security

Line: 122 Column: 3 CWE codes: 120/785!
Suggestion: Ensure that the destination buffer is at least of size MAXPATHLEN, and to protect against implementation problems, the input argument should also be checked to ensure it is no larger than MAXPATHLEN

              std::string resolve_origin(const std::string& so_name) {
  // NOLINTNEXTLINE
  char origin[PATH_MAX];
  realpath(so_name.c_str(), origin);
  dirname(origin);
  return origin;
}

template <typename... Args>

            

Reported by FlawFinder.

char - Statically-sized arrays can be improperly restricted, leading to potential overflows or other issues
Security

Line: 111 Column: 3 CWE codes: 119 120
Suggestion: Perform bounds checking, use functions that limit length, or ensure that the size is larger than the maximum possible length

                std::string result = t;
  replace_all(result, "$ORIGIN", origin);
  // NOLINTNEXTLINE
  char buf[PATH_MAX];
  char* resolved = realpath(result.c_str(), buf);
  if (!resolved) {
    return result;
  }
  return resolved;

            

Reported by FlawFinder.

char - Statically-sized arrays can be improperly restricted, leading to potential overflows or other issues
Security

Line: 121 Column: 3 CWE codes: 119 120
Suggestion: Perform bounds checking, use functions that limit length, or ensure that the size is larger than the maximum possible length

              
std::string resolve_origin(const std::string& so_name) {
  // NOLINTNEXTLINE
  char origin[PATH_MAX];
  realpath(so_name.c_str(), origin);
  dirname(origin);
  return origin;
}


            

Reported by FlawFinder.

open - Check when opening files - can an attacker redirect it (via symlinks), force the opening of special file type (e.g., device files), move things around to create a race condition, control its ancestors, or change its contents?
Security

Line: 223 Column: 11 CWE codes: 362

              // ELF files for dependencies before callling dlopen.
struct MemFile {
  MemFile(const char* filename_) : fd_(0), mem_(nullptr), n_bytes_(0) {
    fd_ = open(filename_, O_RDONLY);
    DEPLOY_CHECK(
        fd_ != -1, "failed to open {}: {}", filename_, strerror(errno));
    struct stat s = {0};
    if (-1 == fstat(fd_, &s)) {
      close(fd_); // destructors don't run during exceptions

            

Reported by FlawFinder.

memcpy - Does not check for buffer overflows when copying to destination
Security

Line: 1244 Column: 7 CWE codes: 120
Suggestion: Make sure destination can always hold the source data

                    auto tls_mem = new TLSMemory(shared_from_this(), tls_mem_size_);
      __cxa_thread_atexit_impl(delete_TLSMemory, tls_mem, &__dso_handle);
      start = tls_mem->mem_;
      memcpy(start, tls_initalization_image_, tls_file_size_);
      memset(
          (void*)((const char*)start + tls_file_size_),
          0,
          tls_mem_size_ - tls_file_size_);
      pthread_setspecific(tls_key_, start);

            

Reported by FlawFinder.

tools/linter/clang_tidy/max_tokens_pragma.py
9 issues
Missing module docstring
Error

Line: 1 Column: 1

              import argparse
import re
from typing import List


# > Why is DEFAULT_MAX_TOKEN_COUNT set to 1?
#
# clang-tidy doesn't have a direct way to query for token counts in the
# codebase. The workaround is to set the max token count to 1. This will cause

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 21 Column: 1

              MAX_TOKENS_PRAGMA_PATTERN = r"^#pragma\s+clang\s+max_tokens_total\s+(\d+)$"


def add_max_tokens_pragma(code: str, num_max_tokens: int) -> str:
    lines = code.splitlines()

    found_pragma = False
    pragma = f"#pragma clang max_tokens_total {num_max_tokens}"


            

Reported by Pylint.

Missing function or method docstring
Error

Line: 41 Column: 1

                  return "\n".join(lines)


def strip_max_tokens_pragmas(code: str) -> str:
    lines = code.splitlines()
    lines = [
        line
        for line in lines
        if re.match(MAX_TOKENS_PRAGMA_PATTERN, line.strip()) is None

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 51 Column: 1

                  return "\n".join(lines)


def add_max_tokens_pragma_to_files(files: List[str], num_max_tokens: int) -> None:
    for filename in files:
        with open(filename, "r+") as f:
            data = f.read()
            data = add_max_tokens_pragma(data, num_max_tokens)


            

Reported by Pylint.

Variable name "f" doesn't conform to snake_case naming style
Error

Line: 53 Column: 38

              
def add_max_tokens_pragma_to_files(files: List[str], num_max_tokens: int) -> None:
    for filename in files:
        with open(filename, "r+") as f:
            data = f.read()
            data = add_max_tokens_pragma(data, num_max_tokens)

            f.seek(0)
            f.write(data)

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 62 Column: 1

                          f.truncate()


def strip_max_tokens_pragma_from_files(files: List[str]) -> None:
    for filename in files:
        with open(filename, "r+") as f:
            data = f.read()
            data = strip_max_tokens_pragmas(data)


            

Reported by Pylint.

Variable name "f" doesn't conform to snake_case naming style
Error

Line: 64 Column: 38

              
def strip_max_tokens_pragma_from_files(files: List[str]) -> None:
    for filename in files:
        with open(filename, "r+") as f:
            data = f.read()
            data = strip_max_tokens_pragmas(data)

            f.seek(0)
            f.write(data)

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 73 Column: 1

                          f.truncate()


def parse_args() -> argparse.Namespace:
    parser = argparse.ArgumentParser(
        description="Add max_tokens_total pragmas to C/C++ source files"
    )
    parser.add_argument(
        "-n",

            

Reported by Pylint.

Missing function or method docstring
Error

Line: 99 Column: 1

                  return parser.parse_args()


def main() -> None:
    options = parse_args()

    ignored = set(options.ignore)
    files = [filename for filename in options.files if filename not in ignored]
    if options.strip:

            

Reported by Pylint.