build(deps): bump mypy from 1.11.0 to 1.15.0 in /requirements #20759

Merged: 6 commits, Apr 28, 2025

Changes from all commits

2 changes: 1 addition & 1 deletion requirements/typing.txt
@@ -1,4 +1,4 @@
-mypy==1.11.0
+mypy==1.15.0
 torch==2.7.0
 
 types-Markdown
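
One way to sanity-check a bump like this locally (a sketch, not the project's actual CI command, which this diff does not show; the "src/lightning" target path is an assumption about the repo layout) is to drive the pinned mypy through its public Python API:

# Sketch: confirm the installed mypy matches the new pin, then type-check.
# mypy.api.run() is mypy's documented programmatic entry point; it returns
# (stdout, stderr, exit_status) just like the command-line tool.
from importlib.metadata import version

from mypy import api

assert version("mypy") == "1.15.0", "install from requirements/typing.txt first"

stdout, stderr, exit_status = api.run(["src/lightning"])
print(stdout, end="")
print(f"mypy exit status: {exit_status}")
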
3 changes: 1 addition & 2 deletions src/lightning/fabric/fabric.py
@@ -367,8 +367,7 @@ def setup_dataloaders(
             )
             for dataloader in dataloaders
         ]
-        dataloaders = dataloaders[0] if len(dataloaders) == 1 else dataloaders
-        return dataloaders  # type: ignore[return-value]
+        return dataloaders[0] if len(dataloaders) == 1 else dataloaders
 
     def _setup_dataloader(
         self, dataloader: DataLoader, use_distributed_sampler: bool = True, move_to_device: bool = True
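
The refactor above is what lets the "type: ignore[return-value]" comment go away: re-binding a name that mypy has already inferred as a list to a "single item or list" union is an incompatible assignment, while returning the union expression directly checks cleanly against the declared return type. A minimal standalone sketch of the general pattern (illustrative names, not Lightning's code):

from typing import Union

def first_or_all(items: list[int]) -> Union[int, list[int]]:
    # items = items[0] if len(items) == 1 else items
    # ...would be flagged: "int | list[int]" cannot be re-bound to a name
    # mypy already types as "list[int]". Returning directly checks cleanly:
    return items[0] if len(items) == 1 else items
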
6 changes: 3 additions & 3 deletions src/lightning/pytorch/callbacks/lr_monitor.py
@@ -251,13 +251,13 @@ def _remap_keys(self, names: list[list[str]], token: str = "/pg1") -> None:
                 elif new_name not in self.lrs:
                     self.lrs[new_name] = []
 
-    def _extract_momentum(self, param_group: dict[str, list], name: str, use_betas: bool) -> dict[str, float]:
+    def _extract_momentum(self, param_group: dict[str, list[float]], name: str, use_betas: bool) -> dict[str, float]:
         if not self.log_momentum:
             return {}
 
         momentum = param_group["betas"][0] if use_betas else param_group.get("momentum", 0)
-        self.last_momentum_values[name] = momentum
-        return {name: momentum}
+        self.last_momentum_values[name] = momentum  # type: ignore[assignment]
+        return {name: momentum}  # type: ignore[dict-item]
 
     def _extract_weight_decay(self, param_group: dict[str, Any], name: str) -> dict[str, Any]:
         """Extracts the weight decay statistics from a parameter group."""
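
The two targeted ignores are needed because optimizer param groups are heterogeneous dicts: once the value type is pinned to list[float], an expression like param_group.get("momentum", 0) is inferred as "list[float] | int", which mypy 1.15 refuses to store where a plain float is expected. A rough illustration (illustrative names, not the callback's code):

param_group: dict[str, list[float]] = {"betas": [0.9, 0.999]}
last_momentum: dict[str, float] = {}

beta0 = param_group["betas"][0]            # float: fine
fallback = param_group.get("momentum", 0)  # inferred as "list[float] | int"
last_momentum["pg1"] = fallback            # type: ignore[assignment]
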
2 changes: 1 addition & 1 deletion src/lightning/pytorch/core/saving.py
@@ -184,7 +184,7 @@ def _load_state(
     obj.on_load_checkpoint(checkpoint)
 
     # load the state_dict on the model automatically
-    keys = obj.load_state_dict(checkpoint["state_dict"], strict=strict)
+    keys = obj.load_state_dict(checkpoint["state_dict"], strict=strict)  # type: ignore[arg-type]
 
     if not strict:
         if keys.missing_keys:
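
The diff does not spell out which argument trips "arg-type" here; one common shape for this error (a guess, flagged as such) is forwarding an Optional parameter into a callee that requires the non-Optional type, e.g. an Optional[bool] strict flag passed to a load_state_dict(strict: bool) signature:

from typing import Optional

def load_state_dict_like(strict: bool = True) -> None:
    """Hypothetical stand-in with a torch-like signature, for illustration."""

def forward_strict(strict: Optional[bool] = None) -> None:
    # mypy: Argument "strict" has incompatible type "Optional[bool]";
    # expected "bool" -- hence a targeted ignore at the call site.
    load_state_dict_like(strict=strict)  # type: ignore[arg-type]
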
@@ -92,7 +92,7 @@ def _generate_sync_fn(self) -> None:
         fn = self.no_op if self.fn is None or not self.should or self.rank_zero_only else self.fn
         # save the function as `_fn` as the meta are being re-created and the object references need to match.
         # ignore typing, bad support for `partial`: mypy/issues/1484
-        self._fn: Callable = partial(fn, reduce_op=self.op, group=self.group)  # type: ignore[arg-type,operator,misc]
+        self._fn: Callable = partial(fn, reduce_op=self.op, group=self.group)  # type: ignore[unused-ignore]
 
     @property
     def __call__(self) -> Any: