Skip to content

Commit 8c90583

Browse files
authored
Merge c5b8ff4 into fbf4a1d
2 parents fbf4a1d + c5b8ff4 commit 8c90583

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

60 files changed

+1849
-322
lines changed

NDTensors/src/NDTensors.jl

Lines changed: 20 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -19,33 +19,26 @@ using Strided
1919
using TimerOutputs
2020
using TupleTools
2121

22-
# TODO: List types, macros, and functions being used.
23-
include("lib/AlgorithmSelection/src/AlgorithmSelection.jl")
24-
using .AlgorithmSelection: AlgorithmSelection
25-
include("lib/BaseExtensions/src/BaseExtensions.jl")
26-
using .BaseExtensions: BaseExtensions
27-
include("lib/SetParameters/src/SetParameters.jl")
28-
using .SetParameters
29-
include("lib/BroadcastMapConversion/src/BroadcastMapConversion.jl")
30-
using .BroadcastMapConversion: BroadcastMapConversion
31-
include("lib/Unwrap/src/Unwrap.jl")
32-
using .Unwrap
33-
include("lib/RankFactorization/src/RankFactorization.jl")
34-
using .RankFactorization: RankFactorization
35-
include("lib/TensorAlgebra/src/TensorAlgebra.jl")
36-
using .TensorAlgebra: TensorAlgebra
37-
include("lib/DiagonalArrays/src/DiagonalArrays.jl")
38-
using .DiagonalArrays
39-
include("lib/BlockSparseArrays/src/BlockSparseArrays.jl")
40-
using .BlockSparseArrays
41-
include("lib/NamedDimsArrays/src/NamedDimsArrays.jl")
42-
using .NamedDimsArrays: NamedDimsArrays
43-
include("lib/SmallVectors/src/SmallVectors.jl")
44-
using .SmallVectors
45-
include("lib/SortedSets/src/SortedSets.jl")
46-
using .SortedSets
47-
include("lib/TagSets/src/TagSets.jl")
48-
using .TagSets
22+
for lib in [
23+
:AlgorithmSelection,
24+
:BaseExtensions,
25+
:SetParameters,
26+
:BroadcastMapConversion,
27+
:Unwrap,
28+
:RankFactorization,
29+
:TensorAlgebra,
30+
:SparseArrayInterface,
31+
:SparseArrayDOKs,
32+
:DiagonalArrays,
33+
:BlockSparseArrays,
34+
:NamedDimsArrays,
35+
:SmallVectors,
36+
:SortedSets,
37+
:TagSets,
38+
]
39+
include("lib/$(lib)/src/$(lib).jl")
40+
@eval using .$lib: $lib
41+
end
4942

5043
using Base: @propagate_inbounds, ReshapedArray, DimOrInd, OneTo
5144

NDTensors/src/abstractarray/fill.jl

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,6 @@
1+
using .SetParameters: DefaultParameters, set_unspecified_parameters
2+
using .Unwrap: unwrap_type
3+
14
function generic_randn(
25
arraytype::Type{<:AbstractArray}, dim::Integer=0; rng=Random.default_rng()
36
)

NDTensors/src/abstractarray/similar.jl

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,5 @@
1+
using .Unwrap: IsWrappedArray
2+
13
## Custom `NDTensors.similar` implementation.
24
## More extensive than `Base.similar`.
35

NDTensors/src/abstractarray/tensoralgebra/contract.jl

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,7 @@
11
using LinearAlgebra: BlasFloat
2+
using .Unwrap: expose
3+
4+
# TODO: Delete these exports
25
export backend_auto, backend_blas, backend_generic
36

47
@eval struct GemmBackend{T}

NDTensors/src/array/permutedims.jl

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,7 @@
1-
## Create the Exposed version of Base.permutedims
1+
using .Unwrap: Exposed, unexpose
2+
3+
# TODO: Move to `Unwrap` module.
4+
# Create the Exposed version of Base.permutedims
25
function permutedims(E::Exposed{<:Array}, perm)
36
## Creating Mperm here to evaluate the permutation and
47
## avoid returning a Stridedview

NDTensors/src/array/set_types.jl

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,5 @@
1+
using .SetParameters: Position, get_parameter, set_parameters
2+
13
"""
24
TODO: Use `Accessors.jl` notation:
35
```julia

NDTensors/src/arraystorage/arraystorage/storage/arraystorage.jl

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,6 @@
1+
using .BlockSparseArrays: BlockSparseArray
2+
using .DiagonalArrays: DiagonalArray
3+
14
# Used for dispatch to distinguish from Tensors wrapping TensorStorage.
25
# Remove once TensorStorage is removed.
36
const ArrayStorage{T,N} = Union{

NDTensors/src/arraystorage/arraystorage/tensor/svd.jl

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,5 @@
1+
using .DiagonalArrays: DiagIndices, DiagonalMatrix
2+
13
backup_svd_alg(::Algorithm"divide_and_conquer") = Algorithm"qr_iteration"()
24
backup_svd_alg(::Algorithm"qr_iteration") = Algorithm"recursive"()
35

@@ -111,7 +113,7 @@ function svd(
111113
# Make the new indices to go onto U and V
112114
# TODO: Put in a separate function, such as
113115
# `rewrap_inds` or something like that.
114-
dS = length(S[DiagIndices()])
116+
dS = length(S[DiagIndices(:)])
115117
indstype = typeof(inds(T))
116118
u = eltype(indstype)(dS)
117119
v = eltype(indstype)(dS)

NDTensors/src/arraystorage/blocksparsearray/storage/contract.jl

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,7 @@
1+
# TODO: Change to:
2+
# using .SparseArrayDOKs: SparseArrayDOK
3+
using .BlockSparseArrays: SparseArray
4+
15
# TODO: This is inefficient, need to optimize.
26
# Look at `contract_labels`, `contract_blocks` and `maybe_contract_blocks!` in:
37
# src/blocksparse/contract_utilities.jl

NDTensors/src/imports.jl

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,10 @@
1+
# Makes `cpu` available as `NDTensors.cpu`.
2+
# TODO: Define `cpu`, `cu`, etc. in a module `DeviceAbstractions`,
3+
# similar to:
4+
# https://github.com/JuliaGPU/KernelAbstractions.jl
5+
# https://github.com/oschulz/HeterogeneousComputing.jl
6+
using .Unwrap: cpu
7+
18
import Base:
29
# Types
310
AbstractFloat,

0 commit comments

Comments
 (0)