Skip to content

Commit

Permalink
[NDTensorsMappedArraysExt] Support for using MappedArrays as data of …
Browse files Browse the repository at this point in the history
…ITensors (#1505)

* [NDTensorsMappedArraysExt] Support for using MappedArrays as data of ITensors

* [NDTensors] Bump to v0.3.32
  • Loading branch information
mtfishman authored Jun 20, 2024
1 parent 7b2ada9 commit 984d814
Show file tree
Hide file tree
Showing 9 changed files with 89 additions and 8 deletions.
4 changes: 3 additions & 1 deletion NDTensors/Project.toml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
name = "NDTensors"
uuid = "23ae76d9-e61a-49c4-8f12-3f1a16adf9cf"
authors = ["Matthew Fishman <[email protected]>"]
version = "0.3.31"
version = "0.3.32"

[deps]
Accessors = "7d9f7c33-5ae7-4f3b-8dc6-eff91059b697"
Expand Down Expand Up @@ -36,6 +36,7 @@ AMDGPU = "21141c5a-9bdb-4563-92ae-f87d6854732e"
CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba"
GPUArraysCore = "46192b85-c4d5-4398-a991-12ede77f4527"
HDF5 = "f67ccb44-e63f-5c2f-98bd-6dc0ccc4ba2f"
MappedArrays = "dbb5928d-eab1-5f90-85c2-b9b0edb7c900"
Metal = "dde4c033-4e86-420c-a63e-0dd931031962"
Octavian = "6fd5a793-0b7e-452c-907f-f8bfe9c57db4"
TBLIS = "48530278-0828-4a49-9772-0f3830dfa1e9"
Expand All @@ -46,6 +47,7 @@ NDTensorsAMDGPUExt = ["AMDGPU", "GPUArraysCore"]
NDTensorsCUDAExt = ["CUDA", "GPUArraysCore"]
NDTensorsGPUArraysCoreExt = "GPUArraysCore"
NDTensorsHDF5Ext = "HDF5"
NDTensorsMappedArraysExt = ["MappedArrays"]
NDTensorsMetalExt = ["GPUArraysCore", "Metal"]
NDTensorsOctavianExt = "Octavian"
NDTensorsTBLISExt = "TBLIS"
Expand Down
25 changes: 25 additions & 0 deletions NDTensors/ext/NDTensorsMappedArraysExt/NDTensorsMappedArraysExt.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
module NDTensorsMappedArraysExt
using MappedArrays: AbstractMappedArray
using NDTensors: NDTensors

# Mapped arrays are lazy, so when `similar` storage is requested for a
# mapped-array type we hand back a plain dense `Array` with the same eltype.
function NDTensors.similar(
  mappedtype::Type{<:AbstractMappedArray}, dims::Tuple{Vararg{Int}}
)
  return similar(Array{eltype(mappedtype)}, dims)
end

# The "similar type" of a mapped array is an ordinary dense `Array`
# with matching element type and dimensionality.
function NDTensors.similartype(mappedtype::Type{<:AbstractMappedArray})
  return Array{eltype(mappedtype),ndims(mappedtype)}
end

function NDTensors.similartype(
  mappedtype::Type{<:AbstractMappedArray}, dims::Tuple{Vararg{Int}}
)
  return Array{eltype(mappedtype),length(dims)}
end

using MappedArrays: ReadonlyMappedArray
using NDTensors: AllowAlias

# It is a bit unfortunate that we have to define this; it fixes an
# ambiguity error with constructors defined by MappedArrays.
function (arraytype::Type{ReadonlyMappedArray{T,N,A,F}} where {T,N,A<:AbstractArray,F})(
  ::AllowAlias, a::AbstractArray
)
  return a
end
end
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,14 @@ end
return set_type_parameter(type, eltype, param)
end

# Generic fallbacks. By convention, most `AbstractArray` subtypes store the
# element type as their first type parameter and the dimensionality as their
# second. That is not guaranteed for every array type, but defining these
# fallbacks makes most operations "just work" on typical AbstractArrays;
# types that deviate from the convention should overload `position`.
position(::Type{<:AbstractArray}, ::typeof(eltype)) = Position(1)
position(::Type{<:AbstractArray}, ::typeof(ndims)) = Position(2)

for wrapper in [:PermutedDimsArray, :(Base.ReshapedArray), :SubArray]
@eval begin
position(type::Type{<:$wrapper}, ::typeof(eltype)) = Position(1)
Expand Down
5 changes: 5 additions & 0 deletions NDTensors/src/lib/TypeParameterAccessors/test/test_basics.jl
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,11 @@ using NDTensors.TypeParameterAccessors:
include("utils/test_inferred.jl")
@testset "TypeParameterAccessors basics" begin
@testset "Get parameters" begin
@test_inferred type_parameter(AbstractArray{Float64}, 1) == Float64 wrapped = true
@test_inferred type_parameter(AbstractArray{Float64}, Position(1)) == Float64
@test_inferred type_parameter(AbstractArray{Float64}, eltype) == Float64
@test_inferred type_parameter(AbstractMatrix{Float64}, ndims) == 2

@test_inferred type_parameter(Array{Float64}, 1) == Float64 wrapped = true
@test_inferred type_parameter(Array{Float64}, Position(1)) == Float64
@test_inferred type_parameter(Val{3}) == 3
Expand Down
13 changes: 7 additions & 6 deletions NDTensors/test/Project.toml
Original file line number Diff line number Diff line change
@@ -1,15 +1,16 @@
[deps]
Adapt = "79e6a3ab-5dfb-504d-930d-738a2a938a0e"
BlockArrays = "8e7c35d0-a365-5155-bbbb-fb81a777f24e"
CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba"
Combinatorics = "861a8166-3701-5b0c-9a16-15d98fcdc6aa"
Compat = "34da2185-b29b-5c13-b0c7-acf172513d20"
CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba"
Dictionaries = "85a47980-9c8c-11e8-2b9f-f7ca1fa99fb4"
EllipsisNotation = "da5c29d0-fa7d-589e-88eb-ea29b0a81949"
FillArrays = "1a297f60-69ca-5386-bcde-b61e274b549b"
GPUArraysCore = "46192b85-c4d5-4398-a991-12ede77f4527"
ITensors = "9136182c-28ba-11e9-034c-db9fb085ebd5"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
MappedArrays = "dbb5928d-eab1-5f90-85c2-b9b0edb7c900"
NDTensors = "23ae76d9-e61a-49c4-8f12-3f1a16adf9cf"
Octavian = "6fd5a793-0b7e-452c-907f-f8bfe9c57db4"
Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
Expand All @@ -22,11 +23,11 @@ TensorOperations = "6aa20fa7-93e2-5fca-9bc0-fbd0db3c71a2"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"

[extras]
AMDGPU = "21141c5a-9bdb-4563-92ae-f87d6854732e"
cuTENSOR = "011b41b2-24ef-40a8-b3eb-fa098493e9e1"
Metal = "dde4c033-4e86-420c-a63e-0dd931031962"

[compat]
Metal = "1.1.0"
cuTENSOR = "2.0"

[extras]
AMDGPU = "21141c5a-9bdb-4563-92ae-f87d6854732e"
Metal = "dde4c033-4e86-420c-a63e-0dd931031962"
cuTENSOR = "011b41b2-24ef-40a8-b3eb-fa098493e9e1"
4 changes: 4 additions & 0 deletions NDTensors/test/ext/Project.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
[deps]
ITensors = "9136182c-28ba-11e9-034c-db9fb085ebd5"
MappedArrays = "dbb5928d-eab1-5f90-85c2-b9b0edb7c900"
NDTensors = "23ae76d9-e61a-49c4-8f12-3f1a16adf9cf"
12 changes: 12 additions & 0 deletions NDTensors/test/ext/runtests.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
@eval module $(gensym())
using Test: @testset
@testset "$(@__DIR__)" begin
  # Collect every sibling file named `test_*.jl` and run it in its own testset.
  test_files = filter(readdir(@__DIR__)) do fname
    startswith(fname, "test_") && endswith(fname, ".jl")
  end
  @testset "Test $(@__DIR__)/$filename" for filename in test_files
    println("Running $(@__DIR__)/$filename")
    include(filename)
  end
end
end
24 changes: 24 additions & 0 deletions NDTensors/test/ext/test_mappedarraysext.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
@eval module $(gensym())
using ITensors: Index, itensor
using LinearAlgebra: qr, svd
using MappedArrays: mappedarray
using Test: @test, @testset
# Lazy test data: the value at index (i₁, i₂, …) is the (float) sum of squares.
f(i::Int...) = float(sum(iⱼ -> iⱼ^2, i))
f(i::CartesianIndex) = f(Tuple(i)...)
@testset "NDTensorsMappedArraysExt" begin
  # `a` is a lazy mapped array, `b` its eager copy; ITensors built from either
  # data storage must agree elementwise and under algebra and factorizations.
  a = mappedarray(f, CartesianIndices((2, 2)))
  b = copy(a)
  i, j = Index.((2, 2))
  ta = itensor(a, i, j)
  tb = itensor(b, i, j)
  # The scrape of this file had dropped the Unicode `≈` (isapprox) operators,
  # leaving invalid expressions like `@test ta tb`; they are restored here.
  @test ta ≈ tb
  @test ta[i => 1, j => 2] ≈ tb[i => 1, j => 2]
  @test 2 * ta ≈ 2 * tb
  @test ta + ta ≈ tb + tb
  @test ta * ta ≈ tb * tb
  ua, sa, va = svd(ta, i)
  @test ua * sa * va ≈ ta
  qa, ra = qr(ta, i)
  @test qa * ra ≈ ta
end
end
2 changes: 1 addition & 1 deletion NDTensors/test/runtests.jl
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ using SafeTestsets: @safetestset
filenames = filter(readdir(@__DIR__)) do f
startswith("test_")(f) && endswith(".jl")(f)
end
for dir in ["lib", "ITensors"]
for dir in ["lib", "ext", "ITensors"]
push!(filenames, joinpath(dir, "runtests.jl"))
end
@testset "Test $(@__DIR__)/$filename" for filename in filenames
Expand Down

2 comments on commit 984d814

@mtfishman
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@JuliaRegistrator register subdir=NDTensors

@JuliaRegistrator
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Registration pull request created: JuliaRegistries/General/109461

Tip: Release Notes

Did you know you can add release notes too? Just add markdown formatted text underneath the comment after the text
"Release notes:" and it will be added to the registry PR, and if TagBot is installed it will also be added to the
release that TagBot creates. i.e.

@JuliaRegistrator register

Release notes:

## Breaking changes

- blah

To add them here just re-invoke and the PR will be updated.

Tagging

After the above pull request is merged, it is recommended that a tag is created on this repository for the registered package version.

This will be done automatically if the Julia TagBot GitHub Action is installed, or can be done manually through the github interface, or via:

git tag -a NDTensors-v0.3.32 -m "<description of version>" 984d814bc74017cce48a4c2a41161b472daa3264
git push origin NDTensors-v0.3.32

Please sign in to comment.