
Commit

add pytest
sarahyurick committed Aug 13, 2024
1 parent c69d1ac commit 1aa2854
Showing 2 changed files with 13 additions and 2 deletions.
crossfit/op/tokenize.py: 4 changes (2 additions, 2 deletions)
@@ -62,10 +62,10 @@ def tokenize_strings(self, sentences, max_length=None):
         self.padding_side = tokenizer.padding_side
         self.pad_token_id = tokenizer.pad_token_id
 
-        if isinstance(sentences, pd.Series):
-            sentences = cudf.from_pandas(sentences)
         if isinstance(sentences, cudf.Series):
             sentences = sentences.to_arrow().to_pylist()
+        elif isinstance(sentences, pd.Series):
+            sentences = sentences.to_list()
 
         with torch.no_grad():
             tokenized_data = tokenizer.batch_encode_plus(
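The net effect of this hunk is that pandas input no longer has to be round-tripped through cuDF: a cudf.Series is converted to a Python list via Arrow, while a pandas Series is converted directly with .to_list(), so the CPU path works without a GPU copy. A minimal standalone sketch of that dispatch, using a hypothetical helper name that is not part of crossfit:

import pandas as pd

def to_string_list(sentences):
    # Mirrors the updated branch logic: take the cuDF path when available,
    # otherwise fall back to plain pandas / list handling.
    try:
        import cudf  # optional GPU dependency
        if isinstance(sentences, cudf.Series):
            # cuDF Series -> Arrow array -> Python list of strings
            return sentences.to_arrow().to_pylist()
    except ImportError:
        pass
    if isinstance(sentences, pd.Series):
        # pandas Series converts directly, no GPU copy needed
        return sentences.to_list()
    return list(sentences)  # assume the input is already list-like

print(to_string_list(pd.Series(["hello world", "this is a sentence"])))
# ['hello world', 'this is a sentence']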
tests/op/test_tokenize.py: 11 changes (11 additions, 0 deletions)
@@ -18,6 +18,8 @@
 cp = pytest.importorskip("cupy")
 cudf = pytest.importorskip("cudf")
 dask_cudf = pytest.importorskip("dask_cudf")
+dd = pytest.importorskip("dask.dataframe")
+pd = pytest.importorskip("pandas")
 transformers = pytest.importorskip("transformers")
 torch = pytest.importorskip("torch")

@@ -144,3 +146,12 @@ def test_clip_tokens_no_clipping_needed():
     assert result["attention_mask"].shape == (2, 3)
     assert torch.equal(result["input_ids"].to("cpu"), torch.tensor([[1, 2, 3], [4, 5, 6]]))
     assert torch.equal(result["attention_mask"].to("cpu"), torch.tensor([[1, 1, 1], [1, 1, 1]]))
+
+
+def test_tokenize_strings_cpu(model_name="microsoft/deberta-v3-base"):
+    model = cf.HFModel(model_name)
+    tokenizer = op.Tokenizer(model, cols=["text"], tokenizer_type="spm")
+    input_strings = ["hello world", "this is a sentence"]
+    ddf = dd.from_pandas(pd.DataFrame({"text": input_strings}), npartitions=1)
+    results = tokenizer(ddf)
+    results = results.compute()
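The new test exercises the CPU path end to end: it builds a Dask DataFrame from plain pandas data and checks that tokenization completes without raising and without any cuDF input. A quick way to run just this test locally, assuming pytest and the optional dependencies above are installed (otherwise the importorskip calls will skip it):

import pytest

# Run only the new CPU tokenization test, verbosely.
raise SystemExit(pytest.main(["tests/op/test_tokenize.py::test_tokenize_strings_cpu", "-v"]))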
