Add support for per-directory configuration #5833

Closed
wants to merge 5 commits
12 changes: 8 additions & 4 deletions pylint/checkers/classes/class_checker.py
@@ -992,10 +992,14 @@ def _check_unused_private_attributes(self, node: nodes.ClassDef) -> None:
if isinstance(attribute.expr, nodes.Call):
continue

if assign_attr.expr.name in {
"cls",
node.name,
} and attribute.expr.name in {"cls", "self", node.name}:
if (
assign_attr.expr.name
in {
"cls",
node.name,
}
and attribute.expr.name in {"cls", "self", node.name}
):
# If assigned to cls or class name, can be accessed by cls/self/class name
break

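This hunk only re-wraps the existing condition for readability: a private attribute assigned on `cls` or on the class itself is treated as reachable through `cls`, `self`, or the class name, so it is not reported as an unused private member. A minimal illustration of the pattern the condition exempts (hypothetical code, not taken from this PR):

class Example:
    @classmethod
    def configure(cls) -> None:
        # Assigned on `cls`, so assign_attr.expr.name == "cls" in the check above.
        cls.__cache = {}

    def lookup(self, key: str):
        # Read through the class name; attribute.expr.name == node.name ("Example"),
        # so the checker breaks out and does not emit unused-private-member.
        return Example.__cache.get(key)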
65 changes: 34 additions & 31 deletions pylint/config/config_initialization.py
@@ -21,42 +21,45 @@ def _config_initialization(
linter: PyLinter,
args_list: list[str],
reporter: reporters.BaseReporter | reporters.MultiReporter | None = None,
config_file: None | str | Path = None,
config_files: list[str | Path] | None = None,
verbose_mode: bool = False,
) -> list[str]:
"""Parse all available options, read config files and command line arguments and
set options accordingly.
"""
config_file = Path(config_file) if config_file else None

# Set the current module to the configuration file
# to allow raising messages on the configuration file.
linter.set_current_module(str(config_file) if config_file else None)

# Read the configuration file
config_file_parser = _ConfigurationFileParser(verbose_mode, linter)
try:
config_data, config_args = config_file_parser.parse_config_file(
file_path=config_file
)
except OSError as ex:
print(ex, file=sys.stderr)
sys.exit(32)

# Run init hook, if present, before loading plugins
if "init-hook" in config_data:
exec(utils._unquote(config_data["init-hook"])) # pylint: disable=exec-used

# Load plugins if specified in the config file
if "load-plugins" in config_data:
linter.load_plugin_modules(utils._splitstrip(config_data["load-plugins"]))

# First we parse any options from a configuration file
try:
linter._parse_configuration_file(config_args)
except _UnrecognizedOptionError as exc:
msg = ", ".join(exc.options)
linter.add_message("unrecognized-option", line=0, args=msg)
config_files = [
Path(config_file) if config_file else None for config_file in (config_files or [None])
]

for config_file in config_files:
# Set the current module to the configuration file
# to allow raising messages on the configuration file.
linter.set_current_module(str(config_file) if config_file else None)

# Read the configuration file
config_file_parser = _ConfigurationFileParser(verbose_mode, linter)
try:
config_data, config_args = config_file_parser.parse_config_file(
file_path=config_file
)
except OSError as ex:
print(ex, file=sys.stderr)
sys.exit(32)

# Run init hook, if present, before loading plugins
if "init-hook" in config_data:
exec(utils._unquote(config_data["init-hook"])) # pylint: disable=exec-used

# Load plugins if specified in the config file
if "load-plugins" in config_data:
linter.load_plugin_modules(utils._splitstrip(config_data["load-plugins"]))

# First we parse any options from a configuration file
try:
linter._parse_configuration_file(config_args)
except _UnrecognizedOptionError as exc:
msg = ", ".join(exc.options)
linter.add_message("unrecognized-option", line=0, args=msg)

# Then, if a custom reporter is provided as argument, it may be overridden
# by file parameters, so we re-set it here. We do this before command line
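The loop above parses each configuration file in turn, so options from a file handled later in the list are applied on top of earlier ones. A minimal, self-contained sketch of that layering idea — `load_layered_configs` and the naive `key = value` parsing are illustrative stand-ins, not pylint APIs:

from pathlib import Path


def load_layered_configs(config_files: list[Path]) -> dict[str, str]:
    """Later (more specific) config files override values from earlier ones."""
    merged: dict[str, str] = {}
    for config_file in config_files:
        if not config_file.is_file():
            continue
        # Stand-in for _ConfigurationFileParser.parse_config_file(): just read
        # flat `key = value` lines and let the latest occurrence win.
        for line in config_file.read_text().splitlines():
            line = line.strip()
            if line and not line.startswith("#") and "=" in line:
                key, _, value = line.partition("=")
                merged[key.strip()] = value.strip()
    return merged

With a repository-level pylintrc listed first and a per-directory .pylintrc listed last, the per-directory values win, which matches the intent of the loop.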
24 changes: 18 additions & 6 deletions pylint/config/find_default_config_files.py
@@ -39,6 +39,23 @@ def _cfg_has_config(path: Path | str) -> bool:
return any(section.startswith("pylint.") for section in parser.sections())


def search_parent_config_files(curdir: Path) -> Iterator[Path]:
    """Walk up through parent packages of *curdir*, yielding any rc files found."""
while (curdir / "__init__.py").is_file():
curdir = curdir.parent
for rc_name in RC_NAMES:
rc_path = curdir / rc_name
if rc_path.is_file():
yield rc_path.resolve()


def find_per_directory_config_files(path: Path) -> Iterator[Path]:
    """Yield config files found in *path* itself, then in its parent packages."""
for config_name in RC_NAMES:
config_file = path / config_name
if config_file.is_file():
yield config_file.resolve()
yield from search_parent_config_files(path)


def find_default_config_files() -> Iterator[Path]:
"""Find all possible config files."""
for config_name in CONFIG_NAMES:
@@ -52,12 +69,7 @@ def find_default_config_files() -> Iterator[Path]:

if Path("__init__.py").is_file():
curdir = Path(os.getcwd()).resolve()
while (curdir / "__init__.py").is_file():
curdir = curdir.parent
for rc_name in RC_NAMES:
rc_path = curdir / rc_name
if rc_path.is_file():
yield rc_path.resolve()
yield from search_parent_config_files(curdir)

if "PYLINTRC" in os.environ and Path(os.environ["PYLINTRC"]).exists():
if Path(os.environ["PYLINTRC"]).is_file():
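Assuming this branch, the new helper can be exercised directly. The layout below is hypothetical; the comments show the order the files would be yielded in, based on the code above:

from pathlib import Path

from pylint.config.find_default_config_files import find_per_directory_config_files

# Hypothetical layout:
#   project/pylintrc
#   project/pkg/__init__.py
#   project/pkg/sub/__init__.py
#   project/pkg/sub/.pylintrc
#
# For Path("project/pkg/sub") this should yield, in order:
#   project/pkg/sub/.pylintrc   (config in the directory itself)
#   project/pylintrc            (found while walking up past the package __init__.py files)
for config_file in find_per_directory_config_files(Path("project/pkg/sub")):
    print(config_file)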
97 changes: 35 additions & 62 deletions pylint/lint/parallel.py
@@ -13,7 +13,8 @@
import dill

from pylint import reporters
from pylint.lint.utils import _patch_sys_path
from pylint.config.find_default_config_files import find_default_config_files
from pylint.lint.utils import (
    _merge_mapreduce_data,
    _patch_sys_path,
    extract_results_from_linter,
    insert_results_to_linter,
)
from pylint.message import Message
from pylint.typing import FileItem
from pylint.utils import LinterStats, merge_stats
@@ -28,25 +29,26 @@

# PyLinter object used by worker processes when checking files using multiprocessing
# should only be used by the worker processes
_worker_linter: PyLinter | None = None
_worker_linters: dict | None = None  # maps config file path -> PyLinter


def _worker_initialize(
linter: bytes, arguments: None | str | Sequence[str] = None
linters: bytes, arguments: None | str | Sequence[str] = None
) -> None:
"""Function called to initialize a worker for a Process within a multiprocessing Pool.

:param linters: A mapping of config file to PyLinter instance, pickled with dill
:param arguments: File or module name(s) to lint and to be added to sys.path
"""
global _worker_linter # pylint: disable=global-statement
_worker_linter = dill.loads(linter)
assert _worker_linter
global _worker_linters # pylint: disable=global-statement
_worker_linters = dill.loads(linters)
assert _worker_linters

# On the worker process side the messages are just collected and passed back to
# parent process as _worker_check_file function's return value
_worker_linter.set_reporter(reporters.CollectingReporter())
_worker_linter.open()
for _worker_linter in _worker_linters.values():
_worker_linter.set_reporter(reporters.CollectingReporter())
_worker_linter.open()

# Patch sys.path so that each argument is importable just like in single job mode
_patch_sys_path(arguments or ())
@@ -65,66 +67,39 @@ def _worker_check_single_file(
int,
defaultdict[str, list[Any]],
]:
if not _worker_linter:
# Each work item arrives as a (config file, FileItem) pair.
rcfiles = file_item[0]
file_item = file_item[1]

if not _worker_linters[rcfiles]:
raise Exception("Worker linter not yet initialised")
_worker_linter.open()
_worker_linter.check_single_file_item(file_item)
mapreduce_data = defaultdict(list)
for checker in _worker_linter.get_checkers():
data = checker.get_map_data()
if data is not None:
mapreduce_data[checker.name].append(data)
msgs = _worker_linter.reporter.messages
assert isinstance(_worker_linter.reporter, reporters.CollectingReporter)
_worker_linter.reporter.reset()
if _worker_linter.current_name is None:
warnings.warn(
(
"In pylint 3.0 the current_name attribute of the linter object should be a string. "
"If unknown it should be initialized as an empty string."
),
DeprecationWarning,
)
_worker_linters[rcfiles].open()
_worker_linters[rcfiles].check_single_file_item(file_item)
(
linter_current_name,
_,
base_name,
msgs,
linter_stats,
linter_msg_status,
mapreduce_data,
) = extract_results_from_linter(_worker_linters[rcfiles])
return (
id(multiprocessing.current_process()),
_worker_linter.current_name,
linter_current_name,
file_item.filepath,
_worker_linter.file_state.base_name,
base_name,
msgs,
_worker_linter.stats,
_worker_linter.msg_status,
linter_stats,
linter_msg_status,
mapreduce_data,
)


def _merge_mapreduce_data(
linter: PyLinter,
all_mapreduce_data: defaultdict[int, list[defaultdict[str, list[Any]]]],
) -> None:
"""Merges map/reduce data across workers, invoking relevant APIs on checkers."""
# First collate the data and prepare it, so we can send it to the checkers for
# validation. The intent here is to collect all the mapreduce data for all checker-
# runs across processes - that will then be passed to a static method on the
# checkers to be reduced and further processed.
collated_map_reduce_data: defaultdict[str, list[Any]] = defaultdict(list)
for linter_data in all_mapreduce_data.values():
for run_data in linter_data:
for checker_name, data in run_data.items():
collated_map_reduce_data[checker_name].extend(data)

# Send the data to checkers that support/require consolidated data
original_checkers = linter.get_checkers()
for checker in original_checkers:
if checker.name in collated_map_reduce_data:
# Assume that if the check has returned map/reduce data that it has the
# reducer function
checker.reduce_map_data(linter, collated_map_reduce_data[checker.name])


def check_parallel(
linter: PyLinter,
linters,
jobs: int,
files: Iterable[FileItem],
files,  # iterable of (config file, FileItem) pairs
arguments: None | str | Sequence[str] = None,
) -> None:
"""Use the given linter to lint the files with given amount of workers (jobs).
@@ -137,7 +112,7 @@ def check_parallel(
# a custom PyLinter object can be used.
initializer = functools.partial(_worker_initialize, arguments=arguments)
with multiprocessing.Pool(
jobs, initializer=initializer, initargs=[dill.dumps(linter)]
jobs, initializer=initializer, initargs=[dill.dumps(linters)]
) as pool:
linter.open()
all_stats = []
@@ -158,13 +133,11 @@
msg_status,
mapreduce_data,
) in pool.imap_unordered(_worker_check_single_file, files):
linter.file_state.base_name = base_name
linter.set_current_module(module, file_path)
for msg in messages:
linter.reporter.handle_message(msg)
insert_results_to_linter(
linter, module, file_path, base_name, messages, msg_status
)
all_stats.append(stats)
all_mapreduce_data[worker_idx].append(mapreduce_data)
linter.msg_status |= msg_status

pool.close()
pool.join()
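The worker-side changes above boil down to a small dispatch pattern: each worker holds one linter per configuration file and selects it using the config key carried alongside every FileItem. A minimal, runnable sketch of that pattern, with plain dictionaries and strings standing in for the dill-pickled PyLinter objects (none of these names are pylint APIs):

import multiprocessing

# One "configuration" per config file; in the real code this is a dict of
# PyLinter objects serialized with dill.
_worker_configs = None


def _init(configs):
    global _worker_configs
    _worker_configs = configs


def _check(item):
    config_path, file_path = item
    # Pick the per-directory configuration that governs this particular file.
    settings = _worker_configs[config_path]
    return f"checked {file_path} against {config_path} ({settings})"


if __name__ == "__main__":
    configs = {
        "proj/pylintrc": "max-line-length=100",
        "proj/pkg/.pylintrc": "max-line-length=79",
    }
    work = [("proj/pylintrc", "proj/a.py"), ("proj/pkg/.pylintrc", "proj/pkg/b.py")]
    with multiprocessing.Pool(2, initializer=_init, initargs=[configs]) as pool:
        for line in pool.imap_unordered(_check, work):
            print(line)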
31 changes: 1 addition & 30 deletions pylint/lint/pylinter.py
@@ -310,6 +310,7 @@ def __init__(
("RP0003", "Messages", report_messages_stats),
)
self.register_checker(self)
self._ignore_paths = self.linter.config.ignore_paths

@property
def option_groups(self) -> tuple[tuple[str, str], ...]:
@@ -671,33 +672,6 @@ def initialize(self) -> None:
if not msg.may_be_emitted():
self._msgs_state[msg.msgid] = False

@staticmethod
def _discover_files(files_or_modules: Sequence[str]) -> Iterator[str]:
"""Discover python modules and packages in subdirectory.

Returns iterator of paths to discovered modules and packages.
"""
for something in files_or_modules:
if os.path.isdir(something) and not os.path.isfile(
os.path.join(something, "__init__.py")
):
skip_subtrees: list[str] = []
for root, _, files in os.walk(something):
if any(root.startswith(s) for s in skip_subtrees):
# Skip subtree of already discovered package.
continue
if "__init__.py" in files:
skip_subtrees.append(root)
yield root
else:
yield from (
os.path.join(root, file)
for file in files
if file.endswith(".py")
)
else:
yield something

def check(self, files_or_modules: Sequence[str] | str) -> None:
"""Main checking entry: check a list of files or modules from their name.

@@ -712,8 +686,6 @@ def check(self, files_or_modules: Sequence[str] | str) -> None:
DeprecationWarning,
)
files_or_modules = (files_or_modules,) # type: ignore[assignment]
if self.config.recursive:
files_or_modules = tuple(self._discover_files(files_or_modules))
if self.config.from_stdin:
if len(files_or_modules) != 1:
raise exceptions.InvalidArgsError(
@@ -1068,7 +1040,6 @@ def open(self) -> None:
self.config.extension_pkg_whitelist
)
self.stats.reset_message_count()
self._ignore_paths = self.linter.config.ignore_paths

def generate_reports(self) -> int | None:
"""Close the whole package /module, it's time to make reports !