Skip to content

Commit

Permalink
Temporarily revert new loss implementations (#916)
Browse files Browse the repository at this point in the history
* Revert "Cross entropy fix (#647)"

This reverts commit c8ac07f.

* Cherry-pick MPS Torch bug workaround to get CI unstuck
  • Loading branch information
danieldk authored Jan 8, 2024
1 parent 886231f commit 816ea33
Show file tree
Hide file tree
Showing 8 changed files with 284 additions and 1,191 deletions.
6 changes: 5 additions & 1 deletion .github/workflows/tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,11 @@ jobs:
run: |
pip install "protobuf~=3.20.0" "tensorflow~=2.5.0"
pip install "mxnet; sys_platform != 'win32'"
pip install "torch!=1.13.0" --extra-index-url https://download.pytorch.org/whl/cpu
pip install "torch!=1.13.0; sys_platform!='darwin'" --extra-index-url https://download.pytorch.org/whl/cpu
# there is a bug related to MPS devices in github macos runners that
# will be fixed in torch v2.1.1
# https://github.com/pytorch/pytorch/pull/111576
pip install "torch>=2.1.1; sys_platform=='darwin'" --extra-index-url https://download.pytorch.org/whl/cpu
pip install "numpy~=1.23.0; python_version=='3.10' and sys_platform=='win32'"
pip install "numpy<1.24.0"
pip install -r requirements.txt
Expand Down
5 changes: 1 addition & 4 deletions examples/mnist.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@
"""
# pip install thinc ml_datasets typer
from thinc.api import Model, chain, Relu, Softmax, Adam
from thinc.api import CategoricalCrossentropy
import ml_datasets
from wasabi import msg
from tqdm import tqdm
Expand All @@ -22,7 +21,6 @@ def main(
)
# Load the data
(train_X, train_Y), (dev_X, dev_Y) = ml_datasets.mnist()
loss_func = CategoricalCrossentropy()
# Set any missing shapes for the model.
model.initialize(X=train_X[:5], Y=train_Y[:5])
train_data = model.ops.multibatch(batch_size, train_X, train_Y, shuffle=True)
Expand All @@ -32,8 +30,7 @@ def main(
for i in range(n_iter):
for X, Y in tqdm(train_data, leave=False):
Yh, backprop = model.begin_update(X)
grad, loss = loss_func(Yh, Y)
backprop(grad)
backprop(Yh - Y)
model.finish_update(optimizer)
# Evaluate and print progress
correct = 0
Expand Down
8 changes: 0 additions & 8 deletions thinc/legacy/__init__.py

This file was deleted.

285 changes: 0 additions & 285 deletions thinc/legacy/loss.py

This file was deleted.

Loading

0 comments on commit 816ea33

Please sign in to comment.