Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 7 additions & 1 deletion .agents/skills/usethis-qa-static-checks/SKILL.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ description: Perform static code checks
compatibility: usethis, Python, prek, basedpyright
license: MIT
metadata:
version: "1.1"
version: "1.2"
---

# Static Checks
Expand All @@ -15,3 +15,9 @@ To perform static checks on the codebase, run:
uv run prek run -a
uv run basedpyright
```

Note that we are interested in both errors and warnings from these tools - we should always fix both.

## When to run these checks:

Before submitting changes for review, **always** run these static checks. This should be done every time, even for small changes, to avoid slowing down the code review process unnecessarily.
2 changes: 1 addition & 1 deletion src/usethis/_backend/uv/deps.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,6 @@ def get_default_groups_via_uv() -> list[str]:
PyprojectTOMLManager()[["tool", "uv", "default-groups"]]
)
except (KeyError, ValidationError):
default_groups = []
default_groups: list[str] = []

return default_groups
2 changes: 1 addition & 1 deletion src/usethis/_backend/uv/python.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ def get_supported_uv_minor_python_versions() -> list[PythonVersion]:
except (MissingRequiresPythonError, PyprojectTOMLNotFoundError):
return [PythonVersion.from_interpreter()]

versions = set()
versions: set[str] = set()
for version in get_available_uv_python_versions():
# N.B. a standard range won't include alpha versions.
if requires_python.contains(version):
Expand Down
2 changes: 1 addition & 1 deletion src/usethis/_core/status.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ def use_development_status(
mgr[["project", "classifiers"]]
)
except (KeyError, ValidationError):
existing_classifiers = []
existing_classifiers: list[str] = []
existing_status_classifiers = {
classifier
for classifier in existing_classifiers
Expand Down
2 changes: 1 addition & 1 deletion src/usethis/_deps.py
Original file line number Diff line number Diff line change
Expand Up @@ -120,7 +120,7 @@ def register_default_group(group: str) -> None:
default_groups = get_default_groups()

# Choose which groups we want to add
groups_to_add = []
groups_to_add: list[str] = []
if group not in default_groups:
groups_to_add.append(group)
# Add "dev" if section is empty or if we're adding a new group and "dev" isn't present
Expand Down
2 changes: 1 addition & 1 deletion src/usethis/_file/ini/io_.py
Original file line number Diff line number Diff line change
Expand Up @@ -356,7 +356,7 @@ def __delitem__(self, keys: Sequence[Key]) -> None:
elif len(keys) == 2:
(section_key, option_key) = keys

section_strkeys = []
section_strkeys: list[str] = []
for section_strkey in _itermatches(self.get().sections(), key=section_key):
section_strkeys.append(section_strkey)

Expand Down
4 changes: 2 additions & 2 deletions src/usethis/_file/pyproject_toml/requires_python.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,7 @@ def get_required_minor_python_versions() -> list[PythonVersion]:
# always deal with just major version 3 in practice.
min_minor_in_spec = min(all_minors)

supported_versions = []
supported_versions: list[PythonVersion] = []
# Generate all major.minor combinations in range
# Basically, do a sophisticated brute-force search
for major in range(min_version[0], max_version[0] + 1):
Expand All @@ -101,7 +101,7 @@ def get_required_minor_python_versions() -> list[PythonVersion]:

# Get patch versions mentioned for this major.minor in the specifier
# The extremes will lie +/- 1 from any named patch version
patches_to_check = set()
patches_to_check: set[int] = set()
major_minor_key = (major, minor)
if major_minor_key in patches_by_short:
for patch in patches_by_short[major_minor_key]:
Expand Down
6 changes: 3 additions & 3 deletions src/usethis/_file/yaml/update.py
Original file line number Diff line number Diff line change
Expand Up @@ -127,11 +127,11 @@ def _shared_id_sequences(*seqs: Sequence[object]) -> Sequence[list[int]]:
# we can store this information as a list of elements, where the index corresponds
# to the integer representation.

iseqs = []
rep = []
iseqs: list[list[int]] = []
rep: list[object] = []

for seq in seqs:
iseq = []
iseq: list[int] = []
for element in seq:
for idx, rep_element in enumerate(rep):
if element == rep_element:
Expand Down
4 changes: 2 additions & 2 deletions src/usethis/_integrations/ci/bitbucket/pipeweld.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ def get_pipeweld_pipeline_from_default(
default = model.pipelines.default

if default is None:
items = []
items: list[schema.StepItem | schema.ParallelItem | schema.StageItem] = []
elif isinstance(default.root, schema.ImportPipeline):
msg = (
"Cannot add step to default pipeline in 'bitbucket-pipelines.yml' because "
Expand Down Expand Up @@ -117,7 +117,7 @@ def apply_pipeweld_instruction_via_model(
default = pipelines.default

if default is None:
items = []
items: list[schema.StepItem | schema.ParallelItem | schema.StageItem] = []
elif isinstance(default.root, schema.ImportPipeline):
msg = (
f"Cannot add step '{step_to_insert.name}' to default pipeline in "
Expand Down
4 changes: 2 additions & 2 deletions src/usethis/_integrations/ci/bitbucket/steps.py
Original file line number Diff line number Diff line change
Expand Up @@ -375,7 +375,7 @@ def _censor_stage_item_step(
) -> schema.StageItem | None:
step1s = item.stage.steps

new_step1s = []
new_step1s: list[schema.Step1] = []
for step1 in step1s:
if bitbucket_steps_are_equivalent(step1tostep(step1), step):
continue
Expand Down Expand Up @@ -474,7 +474,7 @@ def _get_steps_in_pipeline(pipeline: schema.Pipeline) -> list[schema.Step]:

items = pipeline.root.root

steps = []
steps: list[schema.Step] = []
for item in items:
steps.extend(get_steps_in_pipeline_item(item))

Expand Down
2 changes: 1 addition & 1 deletion src/usethis/_integrations/pre_commit/hooks.py
Original file line number Diff line number Diff line change
Expand Up @@ -113,7 +113,7 @@ def insert_repo(
# after the last precedent

inserted = False
repos = []
repos: list[schema.LocalRepo | schema.UriRepo | schema.MetaRepo] = []

if predecessor is None:
# If there is no predecessor, we can just append the new repo
Expand Down
14 changes: 7 additions & 7 deletions src/usethis/_integrations/project/imports.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ def get_layered_architectures(pkg_name: str) -> dict[str, LayeredArchitecture]:
"""
graph = _get_graph(pkg_name)

arch_by_module = {}
arch_by_module: dict[str, LayeredArchitecture] = {}

for module in sorted(graph.modules):
arch = _get_module_layered_architecture(module, graph=graph)
Expand All @@ -61,14 +61,14 @@ def _get_module_layered_architecture(
) -> LayeredArchitecture:
deps_by_module = _get_child_dependencies(module, graph=graph)

layered = set()
layers = []
layered: set[str] = set()
layers: list[set[str]] = []

for _ in range(len(deps_by_module)):
# Form a layer: cycle through all siblings. For ones with no deps, add them
# to the layer and ignore them in the next iteration.

layer = set()
layer: set[str] = set()
for m, deps in deps_by_module.items():
if m in layered:
continue
Expand All @@ -83,7 +83,7 @@ def _get_module_layered_architecture(
layers.append(layer)
layered.update(layer)

excluded = set()
excluded: set[str] = set()
for m in deps_by_module:
if m not in layered:
excluded.add(m)
Expand Down Expand Up @@ -124,7 +124,7 @@ def _get_child_dependencies(
"""
children = sorted(graph.find_children(module))

deps_by_module = {}
deps_by_module: dict[str, set[str]] = {}
for child in children:
downstreams = graph.find_upstream_modules(module=child, as_package=True)
downstreams = _filter_to_submodule(downstreams, submodule=module)
Expand All @@ -135,7 +135,7 @@ def _get_child_dependencies(


def _filter_to_submodule(modules: set[str], *, submodule: str) -> set[str]:
filtered = set()
filtered: set[str] = set()
for module in modules:
if module.startswith(submodule + "."):
filtered.add(_narrow_to_submodule(module, submodule=submodule))
Expand Down
8 changes: 4 additions & 4 deletions src/usethis/_integrations/pydantic/dump.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ def _fancy_model_dump_list(
if not isinstance(reference, list):
reference = []

x = []
x: list[ModelRepresentation] = []
for value, ref in zip_longest(model, reference, fillvalue=_FILL_VALUE):
if value is _FILL_VALUE:
# we've exhausted all the content.
Expand All @@ -96,7 +96,7 @@ def _fancy_model_dump_dict(
) -> ModelRepresentation:
if order_by_cls is None:
order_by_cls = {}
d = {}
d: dict[str, ModelRepresentation] = {}
for key, value in model.items():
if reference is None:
value_ref = None
Expand All @@ -122,7 +122,7 @@ def _fancy_model_dump_base_model(
if order_by_cls is None:
order_by_cls = {}

d = {}
d: dict[str, ModelRepresentation] = {}
for key, value in model:
default_value = model.__class__.model_fields[key].default

Expand Down Expand Up @@ -158,7 +158,7 @@ def _fancy_model_dump_base_model(
except KeyError:
return d

ordered_d = {}
ordered_d: dict[str, ModelRepresentation] = {}
for key in order:
if key in d:
ordered_d[key] = d.pop(key)
Expand Down
2 changes: 1 addition & 1 deletion src/usethis/_integrations/sonarqube/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -104,7 +104,7 @@ def _get_sonarqube_exclusions() -> list[str]:
PyprojectTOMLManager()[["tool", "usethis", "sonarqube", "exclusions"]]
)
except (FileNotFoundError, KeyError, ValidationError):
exclusions = []
exclusions: list[str] = []
for exclusion in exclusions:
TypeAdapter(str).validate_python(exclusion)

Expand Down
2 changes: 1 addition & 1 deletion src/usethis/_io.py
Original file line number Diff line number Diff line change
Expand Up @@ -238,7 +238,7 @@ def print_keys(keys: Sequence[Key]) -> str:
>>> print_keys([re.compile(r"importlinter:contracts:.*")])
'<REGEX("importlinter:contracts:.*")>'
"""
components = []
components: list[str] = []
for key in keys:
if isinstance(key, str):
components.append(key)
Expand Down
8 changes: 4 additions & 4 deletions src/usethis/_pipeweld/func.py
Original file line number Diff line number Diff line change
Expand Up @@ -99,7 +99,7 @@ def _partition_series_component(
self, component: Series, *, predecessor: str | None
) -> tuple[Partition, list[Instruction]]:
partitions: list[Partition] = []
instructions = []
instructions: list[Instruction] = []
for subcomponent in component.root:
partition, these_instructions = self.partition_component(
subcomponent,
Expand Down Expand Up @@ -411,7 +411,7 @@ def _parallel_merge_partitions(
or top_ranked_prerequisite_endpoints
)

instructions = []
instructions: list[Instruction] = []
if prerequisite_component is not None:
new_instructions, _ = _get_instructions_for_insertion(
prerequisite_component, after=predecessor
Expand Down Expand Up @@ -475,7 +475,7 @@ def _get_instructions_for_insertion(
if isinstance(component, str):
return [InsertSuccessor(after=after, step=component)], component
elif isinstance(component, Series):
instructions = []
instructions: list[Instruction] = []
for subcomponent in component.root:
new_instructions, endpoint = _get_instructions_for_insertion(
subcomponent, after=after
Expand Down Expand Up @@ -589,7 +589,7 @@ def get_endpoint(component: str | Series | DepGroup | Parallel) -> str:
msg = "No endpoints are defined for a Series with no steps."
raise ValueError(msg)
elif isinstance(component, Parallel):
endpoints = []
endpoints: list[str] = []
for subcomponent in component.root:
with contextlib.suppress(ValueError):
endpoints.append(get_endpoint(subcomponent))
Expand Down
4 changes: 2 additions & 2 deletions src/usethis/_tool/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@
if TYPE_CHECKING:
from collections.abc import Sequence

from usethis._io import KeyValueFileManager
from usethis._io import Key, KeyValueFileManager
from usethis._tool.config import ConfigItem
from usethis._tool.rule import Rule

Expand Down Expand Up @@ -338,7 +338,7 @@ def _add_config_item(
# using at least a shared subset of those e.g. ["tool", "ruff"], and
# preferentially add to the highest-priority file manager which already has
# config at that shared key sequence.
shared_keys = []
shared_keys: list[Key] = []
for key in entry.keys:
shared_keys.append(key)
new_file_managers = [
Expand Down
2 changes: 1 addition & 1 deletion src/usethis/_tool/impl/base/deptry.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ def ignored_rules(self) -> list[Rule]:
try:
rules = TypeAdapter(list[Rule]).validate_python(file_manager[keys])
except (KeyError, FileNotFoundError, ValidationError):
rules = []
rules: list[Rule] = []

return rules

Expand Down
4 changes: 2 additions & 2 deletions src/usethis/_tool/impl/base/pytest.py
Original file line number Diff line number Diff line change
Expand Up @@ -117,7 +117,7 @@ def get_bitbucket_steps(

backend = get_backend()

steps = []
steps: list[bitbucket_schema.Step] = []
for version in versions:
if backend is BackendEnum.uv:
step = bitbucket_schema.Step(
Expand Down Expand Up @@ -154,7 +154,7 @@ def get_bitbucket_steps(

@final
def get_managed_bitbucket_step_names(self) -> list[str]:
names = set()
names: set[str] = set()
for step in get_steps_in_default():
if step.name is not None:
match = re.match(r"^Test on 3\.\d{1,2}$", step.name)
Expand Down
10 changes: 5 additions & 5 deletions src/usethis/_tool/impl/base/ruff.py
Original file line number Diff line number Diff line change
Expand Up @@ -105,7 +105,7 @@ def print_how_to_use_formatter(self) -> None:

@final
def pre_commit_config(self) -> PreCommitConfig:
repo_configs = []
repo_configs: list[PreCommitRepoConfig] = []
if self.is_linter_used():
repo_configs.append(
PreCommitRepoConfig(
Expand Down Expand Up @@ -139,7 +139,7 @@ def get_bitbucket_steps(
) -> list[bitbucket_schema.Step]:
backend = get_backend()

steps = []
steps: list[bitbucket_schema.Step] = []
if self.is_linter_used():
if backend is BackendEnum.uv:
steps.append(
Expand Down Expand Up @@ -209,7 +209,7 @@ def selected_rules(self) -> list[Rule]:
try:
rules = TypeAdapter(list[Rule]).validate_python(file_manager[keys])
except (KeyError, FileNotFoundError, ValidationError):
rules = []
rules: list[Rule] = []

return rules

Expand All @@ -221,7 +221,7 @@ def ignored_rules(self) -> list[Rule]:
try:
rules = TypeAdapter(list[Rule]).validate_python(file_manager[keys])
except (KeyError, FileNotFoundError, ValidationError):
rules = []
rules: list[Rule] = []

return rules

Expand Down Expand Up @@ -252,7 +252,7 @@ def get_ignored_rules_in_glob(self, glob: str) -> list[Rule]:
try:
rules = TypeAdapter(list[Rule]).validate_python(file_manager[keys])
except (KeyError, FileNotFoundError, ValidationError):
rules = []
rules: list[Rule] = []

return rules

Expand Down
Loading
Loading