Skip to content

Commit 7ab18c8

Browse files
committed
Initial implementation
1 parent 01b7781 commit 7ab18c8

File tree

9 files changed

+206
-100
lines changed

9 files changed

+206
-100
lines changed

.travis.yml

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,10 @@ language: julia
33
os:
44
- linux
55
- osx
6+
- windows
67
julia:
78
- 1.0
9+
- 1
810
- nightly
911
notifications:
1012
email: false

Project.toml

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,9 @@ uuid = "62fd8b95-f654-4bbd-a8a5-9c27f68ccd50"
33
authors = ["Tim Holy <[email protected]>"]
44
version = "0.1.0"
55

6+
[deps]
7+
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
8+
69
[compat]
710
julia = "1"
811

README.md

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,8 @@
11
# TensorCore
22

33
[![Stable](https://img.shields.io/badge/docs-stable-blue.svg)](https://JuliaMath.github.io/TensorCore.jl/stable)
4-
[![Dev](https://img.shields.io/badge/docs-dev-blue.svg)](https://JuliaMath.github.io/TensorCore.jl/dev)
54
[![Build Status](https://travis-ci.com/JuliaMath/TensorCore.jl.svg?branch=master)](https://travis-ci.com/JuliaMath/TensorCore.jl)
65
[![Codecov](https://codecov.io/gh/JuliaMath/TensorCore.jl/branch/master/graph/badge.svg)](https://codecov.io/gh/JuliaMath/TensorCore.jl)
6+
7+
This package is intended as a lightweight foundation for tensor operations across the Julia ecosystem.
8+
Currently it exports two core operations, `hadamard` and `tensor`, and corresponding unicode operators `⊙` and `⊗`, respectively.

docs/Manifest.toml

Lines changed: 0 additions & 93 deletions
This file was deleted.

docs/Project.toml

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,5 @@
11
[deps]
22
Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
3+
4+
[compat]
5+
Documenter = "0.24"

docs/make.jl

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,6 @@ makedocs(;
99
repo="https://github.com/JuliaMath/TensorCore.jl/blob/{commit}{path}#L{line}",
1010
sitename="TensorCore.jl",
1111
authors="Tim Holy <[email protected]>",
12-
assets=String[],
1312
)
1413

1514
deploydocs(;

docs/src/index.md

Lines changed: 10 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,16 @@
11
# TensorCore.jl
22

3+
This package is intended as a lightweight foundation for tensor operations across the Julia ecosystem.
4+
Currently it exports two core operations, `hadamard` and `tensor`, and corresponding unicode operators `⊙` and `⊗`, respectively.
5+
6+
## API
7+
38
```@index
49
```
510

6-
```@autodocs
7-
Modules = [TensorCore]
11+
```@docs
12+
hadamard
13+
hadamard!
14+
tensor
15+
tensor!
816
```

src/TensorCore.jl

Lines changed: 123 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,126 @@
11
module TensorCore

using LinearAlgebra

export ⊙, hadamard, hadamard!
export ⊗, tensor, tensor!

"""
    hadamard(a, b)
    a ⊙ b

For arrays `a` and `b`, perform elementwise multiplication.
`a` and `b` must have identical `axes`.

`⊙` can be passed as an operator to higher-order functions.

```jldoctest; setup=:(using TensorCore)
julia> a = [2, 3]; b = [5, 7];

julia> a ⊙ b
2-element Array{$Int,1}:
 10
 21

julia> a ⊙ [5]
ERROR: DimensionMismatch("Axes of `A` and `B` must match, got (Base.OneTo(2),) and (Base.OneTo(1),)")
[...]
```
"""
function hadamard(A::AbstractArray, B::AbstractArray)
    # Keep the throwing path out of the inlined hot path.
    @noinline throw_dmm(axA, axB) = throw(DimensionMismatch("Axes of `A` and `B` must match, got $axA and $axB"))

    axA, axB = axes(A), axes(B)
    axA == axB || throw_dmm(axA, axB)
    return map(*, A, B)
end
const ⊙ = hadamard

"""
    hadamard!(dest, A, B)

Similar to `hadamard(A, B)` (which can also be written `A ⊙ B`), but stores its results in
the pre-allocated array `dest`.
"""
function hadamard!(dest::AbstractArray, A::AbstractArray, B::AbstractArray)
    @noinline function throw_dmm(axA, axB, axdest)
        throw(DimensionMismatch("`axes(dest) = $axdest` must be equal to `axes(A) = $axA` and `axes(B) = $axB`"))
    end

    axA, axB, axdest = axes(A), axes(B), axes(dest)
    # `&` (not `&&`) evaluates both comparisons without branching.
    ((axdest == axA) & (axdest == axB)) || throw_dmm(axA, axB, axdest)
    # Axes were checked above, so `@inbounds` on a shared `eachindex` is safe.
    @simd for I in eachindex(dest, A, B)
        @inbounds dest[I] = A[I] * B[I]
    end
    return dest
end

"""
    tensor(A, B)
    A ⊗ B

Compute the tensor product of `A` and `B`.
If `C = A ⊗ B`, then `C[i1, ..., im, j1, ..., jn] = A[i1, ... im] * B[j1, ..., jn]`.

```jldoctest; setup=:(using TensorCore)
julia> a = [2, 3]; b = [5, 7, 11];

julia> a ⊗ b
2×3 Array{$Int,2}:
 10  14  22
 15  21  33
```

For vectors `v` and `w`, the Kronecker product is related to the tensor product by
`kron(v,w) == vec(w ⊗ v)` or `w ⊗ v == reshape(kron(v,w), (length(w), length(v)))`.
"""
tensor(A::AbstractArray, B::AbstractArray) = [a*b for a in A, b in B]
const ⊗ = tensor

const CovectorLike{T} = Union{Adjoint{T,<:AbstractVector},Transpose{T,<:AbstractVector}}
function tensor(u::AbstractArray, v::CovectorLike)
    # If `v` is thought of as a covector, you might want this to be two-dimensional,
    # but thought of as a matrix it should be three-dimensional.
    # The safest is to avoid supporting it at all. See discussion in #35150.
    error("`tensor` is not defined for co-vectors, perhaps you meant `*`?")
end
function tensor(u::CovectorLike, v::AbstractArray)
    error("`tensor` is not defined for co-vectors, perhaps you meant `*`?")
end
function tensor(u::CovectorLike, v::CovectorLike)
    error("`tensor` is not defined for co-vectors, perhaps you meant `*`?")
end

"""
    tensor!(dest, A, B)

Similar to `tensor(A, B)` (which can also be written `A ⊗ B`), but stores its results in
the pre-allocated array `dest`.
"""
function tensor!(dest::AbstractArray, A::AbstractArray, B::AbstractArray)
    @noinline function throw_dmm(axA, axB, axdest)
        throw(DimensionMismatch("`axes(dest) = $axdest` must concatenate `axes(A) = $axA` and `axes(B) = $axB`"))
    end

    axA, axB, axdest = axes(A), axes(B), axes(dest)
    axes(dest) == (axA..., axB...) || throw_dmm(axA, axB, axdest)
    if IndexStyle(dest) === IndexCartesian()
        for IB in CartesianIndices(axB)
            @inbounds b = B[IB]
            @simd for IA in CartesianIndices(axA)
                @inbounds dest[IA,IB] = A[IA]*b
            end
        end
    else
        # Linear-index fast path: `dest` is filled column-major, which matches
        # iterating A fastest (first axes of dest are the axes of A).
        i = firstindex(dest)
        @inbounds for b in B
            @simd for a in A
                dest[i] = a*b
                i += 1
            end
        end
    end
    return dest
end

end # module

test/runtests.jl

Lines changed: 62 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,67 @@
11
using TensorCore
2+
using LinearAlgebra
23
using Test
34

5+
# Guard against method ambiguities introduced by the new operator methods.
@testset "Ambiguities" begin
    ambiguities = detect_ambiguities(TensorCore, Base, Core, LinearAlgebra)
    @test isempty(ambiguities)
end
8+
49
@testset "TensorCore.jl" begin
    # Elementwise (⊙), tensor (⊗) and dot (⋅) products across number types.
    for T in (Int, Float32, Float64, BigFloat)
        a = [T[1, 2], T[-3, 7]]
        b = [T[5, 11], T[-13, 17]]
        @test map(⋅, a, b) == map(dot, a, b) == [27, 158]
        @test map(⊙, a, b) == map(hadamard, a, b) == [a[1].*b[1], a[2].*b[2]]
        @test map(⊗, a, b) == map(tensor, a, b) == [a[1]*transpose(b[1]), a[2]*transpose(b[2])]
        # `fill(typemax(Int), ...)` ensures the in-place versions overwrite every slot.
        @test hadamard!(fill(typemax(Int), 2), T[1, 2], T[-3, 7]) == [-3, 14]
        @test tensor!(fill(typemax(Int), 2, 2), T[1, 2], T[-3, 7]) == [-3 7; -6 14]
    end

    @test_throws DimensionMismatch [1,2] ⊙ [3]
    @test_throws DimensionMismatch hadamard!([0, 0, 0], [1,2], [-3,7])
    @test_throws DimensionMismatch hadamard!([0, 0], [1,2], [-3])
    @test_throws DimensionMismatch hadamard!([0, 0], [1], [-3,7])
    @test_throws DimensionMismatch tensor!(Matrix{Int}(undef, 2, 2), [1], [-3,7])
    @test_throws DimensionMismatch tensor!(Matrix{Int}(undef, 2, 2), [1,2], [-3])

    # Complex inputs: ⋅ conjugates its first argument, ⊙ and ⊗ do not.
    u, v = [2+2im, 3+5im], [1-3im, 7+3im]
    @test u ⋅ v == conj(u[1])*v[1] + conj(u[2])*v[2]
    @test u ⊙ v == [u[1]*v[1], u[2]*v[2]]
    @test u ⊗ v == [u[1]*v[1] u[1]*v[2]; u[2]*v[1] u[2]*v[2]]
    @test hadamard(u, v) == u ⊙ v
    @test tensor(u, v) == u ⊗ v
    dest = similar(u)
    @test hadamard!(dest, u, v) == u ⊙ v
    dest = Matrix{Complex{Int}}(undef, 2, 2)
    @test tensor!(dest, u, v) == u ⊗ v

    # Higher-dimensional results: matrix ⊗ vector and matrix ⊗ matrix.
    for (A, B, b) in (([1 2; 3 4], [5 6; 7 8], [5,6]),
                      ([1+0.8im 2+0.7im; 3+0.6im 4+0.5im],
                       [5+0.4im 6+0.3im; 7+0.2im 8+0.1im],
                       [5+0.6im,6+0.3im]))
        @test A ⊗ b == cat(A*b[1], A*b[2]; dims=3)
        @test A ⊗ B == cat(cat(A*B[1,1], A*B[2,1]; dims=3),
                           cat(A*B[1,2], A*B[2,2]; dims=3); dims=4)
    end

    A, B = reshape(1:27, 3, 3, 3), reshape(1:4, 2, 2)
    @test A ⊗ B == [a*b for a in A, b in B]

    # Adjoint/transpose is a dual vector, not an AbstractMatrix
    v = [1,2]
    @test_throws ErrorException v ⊗ v'
    @test_throws ErrorException v ⊗ transpose(v)
    @test_throws ErrorException v' ⊗ v
    @test_throws ErrorException transpose(v) ⊗ v
    @test_throws ErrorException v' ⊗ v'
    @test_throws ErrorException transpose(v) ⊗ transpose(v)
    @test_throws ErrorException v' ⊗ transpose(v)
    @test_throws ErrorException transpose(v) ⊗ v'
    @test_throws ErrorException A ⊗ v'
    @test_throws ErrorException A ⊗ transpose(v)

    # Docs comparison to `kron`
    v, w = [1,2,3], [5,7]
    @test kron(v,w) == vec(w ⊗ v)
    @test w ⊗ v == reshape(kron(v,w), (length(w), length(v)))
end

0 commit comments

Comments
 (0)