Commit: Remove AMDGPU dependency

michel2323 committed Jul 11, 2023
1 parent 4f89411 commit 9e9700c

Showing 4 changed files with 7 additions and 32 deletions.
Project.toml: 7 changes (3 additions, 4 deletions)
@@ -1,10 +1,10 @@
 name = "ProxAL"
 uuid = "12c3852d-bf95-4e7b-be60-68937c3c927b"
 authors = ["Anirudh Subramanyam <[email protected]>", "Youngdae Kim <[email protected]>", "Francois Pacaud <[email protected]>", "Michel Schanen <[email protected]>"]
-version = "0.10.0"
+version = "0.11.0"
 
 [deps]
-AMDGPU = "21141c5a-9bdb-4563-92ae-f87d6854732e"
+Adapt = "79e6a3ab-5dfb-504d-930d-738a2a938a0e"
 CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba"
 CatViews = "81a5f4ea-a946-549a-aa7e-2a7f63a27d31"
 DelimitedFiles = "8bb1440f-4735-579b-a4ab-409b98df4dab"
@@ -23,10 +23,9 @@ TOML = "fa267f1f-6049-4f14-aa54-33bafae1ed76"
 
 [compat]
 Adapt = "3"
-AMDGPU = "0.4"
 CUDA = "4.1"
 CatViews = "1"
-ExaAdmm = "0.5"
+ExaAdmm = "0.6"
 ExaPF = "0.9"
 ExaTron = "3"
 HDF5 = "0.16"
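With AMDGPU gone from [deps], ProxAL 0.11.0 no longer pulls in the ROCm stack for every user; AMD support becomes opt-in from the caller's own environment. A hypothetical downstream setup under that assumption (nothing below is prescribed by the commit itself; only ProxAL, AMDGPU, and ROCBackend come from the source):

# Hypothetical usage after this release: the user, not ProxAL, provides AMDGPU.
using ProxAL
using AMDGPU   # installed in the user's environment, no longer a ProxAL dependency

# ROCBackend is the KernelAbstractions backend for AMD GPUs; it is the
# `ka_device` that flows into ProxAL's ExaAdmm backend (see the next diff).
gpu_device = AMDGPU.ROCBackend()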
src/ExaAdmmBackend/ExaAdmmBackend.jl: 8 changes (4 additions, 4 deletions)
@@ -1,7 +1,7 @@
 module ExaAdmmBackend
 
+using Adapt
 using CUDA
-using AMDGPU
 import MPI
 using KernelAbstractions
 using ExaAdmm
@@ -129,9 +129,9 @@ function ExaAdmm.AdmmEnv(opfdata, rho_va::Float64, rho_pq::Float64; use_gpu=fals
     T = Float64
     if use_gpu
         if !isa(ka_device, Nothing)
-            VT = typeof(ExaAdmm.KAArray{Float64}(0, ka_device))
-            VI = typeof(ExaAdmm.KAArray{Int}(0, ka_device))
-            MT = typeof(ExaAdmm.KAArray{Float64}(0, 0, ka_device))
+            VT = typeof(adapt(ka_device, Vector{Float64}(undef, 0)))
+            VI = typeof(adapt(ka_device, Vector{Int}(undef, 0)))
+            MT = typeof(adapt(ka_device, Matrix{Float64}(undef, 0, 0)))
         else
             VT = CuVector{Float64}
             VI = CuVector{Int}
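This hunk is the heart of the change: Adapt.adapt dispatches on the KernelAbstractions backend, so adapting an empty host array to the device yields the device's array type without naming CuArray or ROCArray anywhere in ProxAL, and hence without a hard AMDGPU dependency. A minimal standalone sketch of the idiom, assuming CUDA.jl is loaded (a ROCBackend from AMDGPU.jl would produce ROC array types the same way):

using Adapt
using KernelAbstractions
using CUDA   # supplies CUDABackend and the adapt rules to CuArray

ka_device = CUDABackend()

# Adapt an empty host array purely to learn the device container types.
VT = typeof(adapt(ka_device, Vector{Float64}(undef, 0)))     # CuArray{Float64, 1, ...}
VI = typeof(adapt(ka_device, Vector{Int}(undef, 0)))         # CuArray{Int64, 1, ...}
MT = typeof(adapt(ka_device, Matrix{Float64}(undef, 0, 0)))  # CuArray{Float64, 2, ...}

Because only the backend object varies, the same three lines serve any vendor whose array package defines adapt rules for its KernelAbstractions backend.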
test/blockmodel.jl: 12 changes (0 additions, 12 deletions)
@@ -24,21 +24,9 @@ load_file = joinpath(DATA_DIR, "mp_demand", "$(case)_oneweek_168")
 
 solver_list = ["ExaAdmmCPU"]
 if CUDA.has_cuda_gpu()
-    function ProxAL.ExaAdmm.KAArray{T}(n::Int, device::CUDABackend) where {T}
-        return CuArray{T}(undef, n)
-    end
-    function ProxAL.ExaAdmm.KAArray{T}(n1::Int, n2::Int, device::CUDABackend) where {T}
-        return CuArray{T}(undef, n1, n2)
-    end
     gpu_device = CUDABackend()
     push!(solver_list, "ExaAdmmGPUKA")
 elseif AMDGPU.has_rocm_gpu()
-    function ProxAL.ExaAdmm.KAArray{T}(n::Int, device::ROCBackend) where {T}
-        return ROCArray{T}(undef, n)
-    end
-    function ProxAL.ExaAdmm.KAArray{T}(n1::Int, n2::Int, device::ROCBackend) where {T}
-        return ROCArray{T}(undef, n1, n2)
-    end
     gpu_device = ROCBackend()
     push!(solver_list, "ExaAdmmGPUKA")
 end
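The deleted methods were a workaround bordering on type piracy: the tests defined ProxAL.ExaAdmm.KAArray constructors for backends they do not own, solely so device arrays could be allocated generically. With the adapt-based construction above, they are redundant. For completeness, KernelAbstractions offers a pirate-free allocator of its own; a sketch assuming KernelAbstractions 0.9-style allocate and a CUDA device (the same calls under a ROCBackend would return ROCArrays):

using KernelAbstractions
using CUDA   # provides CUDABackend and CuArray

backend = CUDABackend()

# Allocate directly on the device selected by the backend, with no
# vendor-specific constructor and no methods added to foreign types.
x = KernelAbstractions.allocate(backend, Float64, 16)    # CuVector{Float64}
A = KernelAbstractions.allocate(backend, Float64, 4, 4)  # CuMatrix{Float64}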
test/convergence.jl: 12 changes (0 additions, 12 deletions)
@@ -39,21 +39,9 @@ algparams.verbose = 0
 solver_list = ["Ipopt", "ExaAdmmCPU"]
 if CUDA.has_cuda_gpu()
     push!(solver_list, "ExaAdmmGPU")
-    function ProxAL.ExaAdmm.KAArray{T}(n::Int, device::CUDABackend) where {T}
-        return CuArray{T}(undef, n)
-    end
-    function ProxAL.ExaAdmm.KAArray{T}(n1::Int, n2::Int, device::CUDABackend) where {T}
-        return CuArray{T}(undef, n1, n2)
-    end
     gpu_device = CUDABackend()
     push!(solver_list, "ExaAdmmGPUKA")
 elseif AMDGPU.has_rocm_gpu()
-    function ProxAL.ExaAdmm.KAArray{T}(n::Int, device::ROCBackend) where {T}
-        return ROCArray{T}(undef, n)
-    end
-    function ProxAL.ExaAdmm.KAArray{T}(n1::Int, n2::Int, device::ROCBackend) where {T}
-        return ROCArray{T}(undef, n1, n2)
-    end
     gpu_device = ROCBackend()
     push!(solver_list, "ExaAdmmGPUKA")
 end