Commit

Bump version to 0.18.0.dev (#1717)
milocress authored Jan 30, 2025
1 parent cc0df9f commit a02b90d
Showing 2 changed files with 1 addition and 17 deletions.
llmfoundry/_version.py (2 changes: 1 addition & 1 deletion)
@@ -3,4 +3,4 @@
 
 """The LLM Foundry Version."""
 
-__version__ = '0.17.0.dev0'
+__version__ = '0.18.0.dev0'
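
As a quick sanity check after installing from this commit, the bumped version string can be read straight from the module the diff touches (a minimal sketch; the import path comes directly from the file above):

    from llmfoundry._version import __version__

    print(__version__)  # '0.18.0.dev0' at this commit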
llmfoundry/models/mpt/modeling_mpt.py (16 changes: 0 additions & 16 deletions)
@@ -79,8 +79,6 @@
 from llmfoundry.models.layers.norm import LPLayerNorm  # type: ignore
 # isort: on
 
-from llmfoundry.utils.warnings import VersionedDeprecationWarning
-
 log = logging.getLogger(__name__)
 
 CROSS_ENTROPY_IGNORE_INDEX = -100
@@ -1348,7 +1346,6 @@ def compute_loss_from_logits(
     shift_labels: bool,
     labels: torch.Tensor,
     loss_fn: nn.Module,
-    sample_weighing_factor: Optional[torch.Tensor] = None,
 ) -> torch.Tensor:
     targets = get_targets(labels) if shift_labels else labels

@@ -1361,18 +1358,6 @@
         loss = losses.sum()
     else:
         loss = losses.sum() / (targets != loss_fn.ignore_index).sum()
-    if sample_weighing_factor is not None:
-        warnings.warn(
-            VersionedDeprecationWarning(
-                message='sample_weighing_factor has been deprecated!',
-                remove_version='0.17.0',
-            ),
-        )
-        if sample_weighing_factor.shape[0] > 1:
-            raise ValueError(
-                'Sample weighing factor is not supported when batch["sample_weighing_factor"].shape[0] > 1.',
-            )
-        loss = loss * sample_weighing_factor[0].item()
 
     return loss
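
The reduction logic that survives this hunk either sums the per-token losses or averages them over non-ignored targets. A standalone illustration in plain PyTorch (not llmfoundry code; the tensor shapes and values are made up):

    import torch
    import torch.nn as nn

    # Per-token losses, as produced upstream of the hunk above.
    loss_fn = nn.CrossEntropyLoss(ignore_index=-100, reduction='none')
    logits = torch.randn(8, 32)  # 8 tokens, vocabulary of 32
    targets = torch.tensor([1, 2, -100, 4, 5, -100, 7, 8])
    losses = loss_fn(logits, targets)  # ignored positions contribute 0

    sum_loss = losses.sum()  # the 'sum' branch
    # The other branch: average over the 6 targets that are not ignore_index.
    mean_loss = losses.sum() / (targets != loss_fn.ignore_index).sum()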

@@ -1481,7 +1466,6 @@ def loss(self, outputs: CausalLMOutputWithPast,
             self.shift_labels,
             batch['labels'],
             self.loss_fn,
-            batch.get('sample_weighing_factor', None),
         )
 
         if self.config.ffn_config['ffn_type'] in ffns_with_megablocks:
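For downstream code that still populates batch['sample_weighing_factor'], one possible migration is to rescale the returned loss in a model subclass. This is a hedged sketch, not llmfoundry API: ComposerMPTCausalLM and its import path are taken from this repository's MPT module, the override assumes loss() returns a scalar tensor (the megablocks branch above may return something else), and the scalar-only restriction mirrors the removed code:

    from llmfoundry.models.mpt import ComposerMPTCausalLM  # assumed export

    class WeightedLossMPT(ComposerMPTCausalLM):
        def loss(self, outputs, batch):
            loss = super().loss(outputs, batch)
            weight = batch.get('sample_weighing_factor', None)
            if weight is not None:
                # Mirrors the removed behavior: only a single scalar weight.
                if weight.shape[0] > 1:
                    raise ValueError('Only a scalar weight is supported.')
                loss = loss * weight[0].item()
            return loss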
