Skip to content

Commit

Permalink
Fix dltensor operator tests (#3984)
Browse files Browse the repository at this point in the history
* Fix dltensor operator tests (missing imports)
* Fix backend_impl shadowing Pipeline import

Signed-off-by: Kamil Tokarski <[email protected]>
  • Loading branch information
stiepan committed Jun 14, 2022
1 parent ac98939 commit ce25d72
Show file tree
Hide file tree
Showing 2 changed files with 6 additions and 4 deletions.
6 changes: 4 additions & 2 deletions dali/test/python/test_dltensor_operator.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
# Copyright (c) 2019, 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
Expand Down Expand Up @@ -27,13 +27,15 @@
def setup_pytorch():
    """Lazily import torch and its DLPack helpers as module globals.

    Exposes ``torch`` and ``torch.utils.dlpack`` (as ``torch_dlpack``) at
    module scope, and creates a dedicated CUDA stream (``torch_stream``)
    for the DLPack interop tests.

    NOTE(review): ``torch.cuda.Stream()`` requires a working CUDA runtime;
    calling this without a GPU will raise.
    """
    global torch, torch_dlpack, torch_stream
    # Plain `import torch` — the redundant `import torch as torch` form is dropped.
    import torch
    import torch.utils.dlpack as torch_dlpack
    torch_stream = torch.cuda.Stream()


def setup_mxnet():
    """Lazily import MXNet's ndarray module, exposing it as the global ``mxnd``."""
    global mxnd
    import mxnet
    mxnd = mxnet.ndarray


def setup_cupy():
Expand Down
4 changes: 2 additions & 2 deletions dali/test/python/test_external_source_pytorch_dlpack.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
# Copyright (c) 2020, 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
Expand All @@ -16,8 +16,8 @@
import nvidia.dali.fn as fn
import nvidia.dali.ops as ops
import torch
from nvidia.dali import Pipeline
from nvidia.dali.backend_impl import * # noqa: F401, F403
from nvidia.dali import Pipeline
from torch.utils.dlpack import to_dlpack, from_dlpack

from test_utils import check_output
Expand Down

0 comments on commit ce25d72

Please sign in to comment.