Commit 2a1aa19

Runic formatting (#1673)
1 parent 21a95f3 commit 2a1aa19


79 files changed: +14013 −13972 lines

.github/PULL_REQUEST_TEMPLATE.md

Lines changed: 2 additions & 0 deletions
@@ -11,13 +11,15 @@ If practical and applicable, please include a minimal demonstration of the previ
 ```julia
 [YOUR MINIMAL DEMONSTRATION OF PREVIOUS BEHAVIOR]
 ```
+
 </p></details>

 <details><summary>Minimal demonstration of new behavior</summary><p>

 ```julia
 [YOUR MINIMAL DEMONSTRATION OF NEW BEHAVIOR]
 ```
+
 </p></details>

 # How Has This Been Tested?

.github/workflows/FormatCheck.yml

Lines changed: 8 additions & 5 deletions
@@ -1,11 +1,14 @@
 name: "Format Check"

 on:
-  push:
-    branches:
-      - 'main'
-    tags: '*'
-  pull_request:
+  pull_request_target:
+    paths: ['**/*.jl']
+    types: [opened, synchronize, reopened, ready_for_review]
+
+permissions:
+  contents: read
+  actions: write
+  pull-requests: write

 jobs:
   format-check:

.github/workflows/FormatPullRequest.yml

Lines changed: 14 additions & 0 deletions
@@ -0,0 +1,14 @@
+name: "Format Pull Request"
+
+on:
+  schedule:
+    - cron: '0 0 * * *'
+  workflow_dispatch:
+permissions:
+  contents: write
+  pull-requests: write
+
+jobs:
+  format-pull-request:
+    name: "Format Pull Request"
+    uses: "ITensor/ITensorActions/.github/workflows/FormatPullRequest.yml@main"

.github/workflows/VersionCheck.yml

Lines changed: 11 additions & 0 deletions
@@ -0,0 +1,11 @@
+name: "Version Check"
+
+on:
+  pull_request:
+
+jobs:
+  version-check:
+    name: "Version Check"
+    uses: "ITensor/ITensorActions/.github/workflows/VersionCheck.yml@main"
+    with:
+      localregistry: https://github.com/ITensor/ITensorRegistry.git

.pre-commit-config.yaml

Lines changed: 4 additions & 6 deletions
@@ -1,5 +1,5 @@
 ci:
-  skip: [julia-formatter]
+  skip: [runic]

 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
@@ -9,12 +9,10 @@ repos:
       - id: check-toml
       - id: check-yaml
       - id: end-of-file-fixer
-        exclude: '.*references/.*\.txt$' # do not check reference TN images
         exclude_types: [markdown] # incompatible with Literate.jl
-      - id: trailing-whitespace
         exclude: '.*references/.*\.txt$' # do not check reference TN images

-  - repo: "https://github.com/domluna/JuliaFormatter.jl"
-    rev: v2.1.6
+  - repo: https://github.com/fredrikekre/runic-pre-commit
+    rev: v2.0.1
     hooks:
-      - id: "julia-formatter"
+      - id: runic
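
The practical effect of swapping the hook from JuliaFormatter to Runic is visible throughout the `.jl` diffs below: four-space indentation, a space after commas inside `Union{...}`, spaces around `<:` in subtype constraints, and an explicit `return` in front of a value-returning trailing `if` block. A minimal before/after sketch (illustrative only; `f` is a made-up function, not code from this commit):

```julia
# JuliaFormatter style (before):
function f(x::Union{Int,Float64}, ::Type{T}) where {T<:Real}
  return 2x
end

# Runic style (after):
function f(x::Union{Int, Float64}, ::Type{T}) where {T <: Real}
    return 2x
end
```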

NDTensors/Project.toml

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 name = "NDTensors"
 uuid = "23ae76d9-e61a-49c4-8f12-3f1a16adf9cf"
 authors = ["Matthew Fishman <[email protected]>"]
-version = "0.4.11"
+version = "0.4.12"

 [deps]
 Accessors = "7d9f7c33-5ae7-4f3b-8dc6-eff91059b697"

NDTensors/ext/NDTensorsHDF5Ext/blocksparse.jl

Lines changed: 52 additions & 52 deletions
@@ -3,66 +3,66 @@ using NDTensors: data, Block, blockoffsets, BlockOffsets, BlockSparse

 # Helper function for HDF5 write/read of BlockSparse
 function offsets_to_array(boff::BlockOffsets{N}) where {N}
-  nblocks = length(boff)
-  asize = (N + 1) * nblocks
-  n = 1
-  a = Vector{Int}(undef, asize)
-  for bo in pairs(boff)
-    for j in 1:N
-      a[n] = bo[1][j]
-      n += 1
+    nblocks = length(boff)
+    asize = (N + 1) * nblocks
+    n = 1
+    a = Vector{Int}(undef, asize)
+    for bo in pairs(boff)
+        for j in 1:N
+            a[n] = bo[1][j]
+            n += 1
+        end
+        a[n] = bo[2]
+        n += 1
     end
-    a[n] = bo[2]
-    n += 1
-  end
-  return a
+    return a
 end

 # Helper function for HDF5 write/read of BlockSparse
 function array_to_offsets(a, N::Int)
-  asize = length(a)
-  nblocks = div(asize, N + 1)
-  boff = BlockOffsets{N}()
-  j = 0
-  for b in 1:nblocks
-    insert!(boff, Block(ntuple(i -> (a[j + i]), N)), a[j + N + 1])
-    j += (N + 1)
-  end
-  return boff
+    asize = length(a)
+    nblocks = div(asize, N + 1)
+    boff = BlockOffsets{N}()
+    j = 0
+    for b in 1:nblocks
+        insert!(boff, Block(ntuple(i -> (a[j + i]), N)), a[j + N + 1])
+        j += (N + 1)
+    end
+    return boff
 end

-function HDF5.write(parent::Union{HDF5.File,HDF5.Group}, name::String, B::BlockSparse)
-  g = create_group(parent, name)
-  attributes(g)["type"] = "BlockSparse{$(eltype(B))}"
-  attributes(g)["version"] = 1
-  if eltype(B) != Nothing
-    write(g, "ndims", ndims(B))
-    write(g, "data", data(B))
-    off_array = offsets_to_array(blockoffsets(B))
-    write(g, "offsets", off_array)
-  end
+function HDF5.write(parent::Union{HDF5.File, HDF5.Group}, name::String, B::BlockSparse)
+    g = create_group(parent, name)
+    attributes(g)["type"] = "BlockSparse{$(eltype(B))}"
+    attributes(g)["version"] = 1
+    return if eltype(B) != Nothing
+        write(g, "ndims", ndims(B))
+        write(g, "data", data(B))
+        off_array = offsets_to_array(blockoffsets(B))
+        write(g, "offsets", off_array)
+    end
 end

 function HDF5.read(
-  parent::Union{HDF5.File,HDF5.Group}, name::AbstractString, ::Type{Store}
-) where {Store<:BlockSparse}
-  g = open_group(parent, name)
-  ElT = eltype(Store)
-  typestr = "BlockSparse{$ElT}"
-  if read(attributes(g)["type"]) != typestr
-    error("HDF5 group or file does not contain $typestr data")
-  end
-  N = read(g, "ndims")
-  off_array = read(g, "offsets")
-  boff = array_to_offsets(off_array, N)
-  # Attribute __complex__ is attached to the "data" dataset
-  # by the h5 library used by C++ version of ITensor:
-  if haskey(attributes(g["data"]), "__complex__")
-    M = read(g, "data")
-    nelt = size(M, 1) * size(M, 2)
-    data = Vector(reinterpret(ComplexF64, reshape(M, nelt)))
-  else
-    data = read(g, "data")
-  end
-  return BlockSparse(data, boff)
+        parent::Union{HDF5.File, HDF5.Group}, name::AbstractString, ::Type{Store}
+    ) where {Store <: BlockSparse}
+    g = open_group(parent, name)
+    ElT = eltype(Store)
+    typestr = "BlockSparse{$ElT}"
+    if read(attributes(g)["type"]) != typestr
+        error("HDF5 group or file does not contain $typestr data")
+    end
+    N = read(g, "ndims")
+    off_array = read(g, "offsets")
+    boff = array_to_offsets(off_array, N)
+    # Attribute __complex__ is attached to the "data" dataset
+    # by the h5 library used by C++ version of ITensor:
+    if haskey(attributes(g["data"]), "__complex__")
+        M = read(g, "data")
+        nelt = size(M, 1) * size(M, 2)
+        data = Vector(reinterpret(ComplexF64, reshape(M, nelt)))
+    else
+        data = read(g, "data")
+    end
+    return BlockSparse(data, boff)
 end
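
As a usage sketch of the reformatted overloads above (not part of this commit; the tiny block layout, the offset value, and the file name are assumptions built from the helper code shown here), a `BlockSparse` store can be round-tripped through HDF5:

```julia
using HDF5: h5open
using NDTensors: Block, BlockOffsets, BlockSparse  # loading HDF5 and NDTensors together activates NDTensorsHDF5Ext

# A one-block storage: a 2x2 block at block position (1, 1), starting at offset 0 in the flat data.
boff = BlockOffsets{2}()
insert!(boff, Block((1, 1)), 0)
B = BlockSparse(collect(1.0:4.0), boff)

h5open("blocksparse_roundtrip.h5", "w") do fid
    write(fid, "B", B)        # dispatches to the HDF5.write overload above
end

B2 = h5open("blocksparse_roundtrip.h5", "r") do fid
    read(fid, "B", typeof(B)) # dispatches to the HDF5.read overload above
end
```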

NDTensors/ext/NDTensorsHDF5Ext/dense.jl

Lines changed: 29 additions & 29 deletions
@@ -2,36 +2,36 @@ using HDF5: HDF5, attributes, create_group, open_group, read, write
 using NDTensors: Dense

 function HDF5.write(
-  parent::Union{HDF5.File,HDF5.Group}, name::String, D::Store
-) where {Store<:Dense}
-  g = create_group(parent, name)
-  attributes(g)["type"] = "Dense{$(eltype(Store))}"
-  attributes(g)["version"] = 1
-  if eltype(D) != Nothing
-    write(g, "data", D.data)
-  end
+        parent::Union{HDF5.File, HDF5.Group}, name::String, D::Store
+    ) where {Store <: Dense}
+    g = create_group(parent, name)
+    attributes(g)["type"] = "Dense{$(eltype(Store))}"
+    attributes(g)["version"] = 1
+    return if eltype(D) != Nothing
+        write(g, "data", D.data)
+    end
 end

 function HDF5.read(
-  parent::Union{HDF5.File,HDF5.Group}, name::AbstractString, ::Type{Store}
-) where {Store<:Dense}
-  g = open_group(parent, name)
-  ElT = eltype(Store)
-  typestr = "Dense{$ElT}"
-  if read(attributes(g)["type"]) != typestr
-    error("HDF5 group or file does not contain $typestr data")
-  end
-  if ElT == Nothing
-    return Dense{Nothing}()
-  end
-  # Attribute __complex__ is attached to the "data" dataset
-  # by the h5 library used by C++ version of ITensor:
-  if haskey(attributes(g["data"]), "__complex__")
-    M = read(g, "data")
-    nelt = size(M, 1) * size(M, 2)
-    data = Vector(reinterpret(ComplexF64, reshape(M, nelt)))
-  else
-    data = read(g, "data")
-  end
-  return Dense{ElT}(data)
+        parent::Union{HDF5.File, HDF5.Group}, name::AbstractString, ::Type{Store}
+    ) where {Store <: Dense}
+    g = open_group(parent, name)
+    ElT = eltype(Store)
+    typestr = "Dense{$ElT}"
+    if read(attributes(g)["type"]) != typestr
+        error("HDF5 group or file does not contain $typestr data")
+    end
+    if ElT == Nothing
+        return Dense{Nothing}()
+    end
+    # Attribute __complex__ is attached to the "data" dataset
+    # by the h5 library used by C++ version of ITensor:
+    if haskey(attributes(g["data"]), "__complex__")
+        M = read(g, "data")
+        nelt = size(M, 1) * size(M, 2)
+        data = Vector(reinterpret(ComplexF64, reshape(M, nelt)))
+    else
+        data = read(g, "data")
+    end
+    return Dense{ElT}(data)
 end

NDTensors/ext/NDTensorsHDF5Ext/diag.jl

Lines changed: 30 additions & 30 deletions
@@ -2,37 +2,37 @@ using HDF5: HDF5, attributes, create_group, open_group, read, write
 using NDTensors: datatype, Dense, Diag

 function HDF5.write(
-  parent::Union{HDF5.File,HDF5.Group}, name::String, D::Store
-) where {Store<:Diag}
-  g = create_group(parent, name)
-  attributes(g)["type"] = "Diag{$(eltype(Store)),$(datatype(Store))}"
-  attributes(g)["version"] = 1
-  if eltype(D) != Nothing
-    write(g, "data", D.data)
-  end
+        parent::Union{HDF5.File, HDF5.Group}, name::String, D::Store
+    ) where {Store <: Diag}
+    g = create_group(parent, name)
+    attributes(g)["type"] = "Diag{$(eltype(Store)),$(datatype(Store))}"
+    attributes(g)["version"] = 1
+    return if eltype(D) != Nothing
+        write(g, "data", D.data)
+    end
 end

 function HDF5.read(
-  parent::Union{HDF5.File,HDF5.Group}, name::AbstractString, ::Type{Store}
-) where {Store<:Diag}
-  g = open_group(parent, name)
-  ElT = eltype(Store)
-  DataT = datatype(Store)
-  typestr = "Diag{$ElT,$DataT}"
-  if read(attributes(g)["type"]) != typestr
-    error("HDF5 group or file does not contain $typestr data")
-  end
-  if ElT == Nothing
-    return Dense{Nothing}()
-  end
-  # Attribute __complex__ is attached to the "data" dataset
-  # by the h5 library used by C++ version of ITensor:
-  if haskey(attributes(g["data"]), "__complex__")
-    M = read(g, "data")
-    nelt = size(M, 1) * size(M, 2)
-    data = Vector(reinterpret(ComplexF64, reshape(M, nelt)))
-  else
-    data = read(g, "data")
-  end
-  return Diag{ElT,DataT}(data)
+        parent::Union{HDF5.File, HDF5.Group}, name::AbstractString, ::Type{Store}
+    ) where {Store <: Diag}
+    g = open_group(parent, name)
+    ElT = eltype(Store)
+    DataT = datatype(Store)
+    typestr = "Diag{$ElT,$DataT}"
+    if read(attributes(g)["type"]) != typestr
+        error("HDF5 group or file does not contain $typestr data")
+    end
+    if ElT == Nothing
+        return Dense{Nothing}()
+    end
+    # Attribute __complex__ is attached to the "data" dataset
+    # by the h5 library used by C++ version of ITensor:
+    if haskey(attributes(g["data"]), "__complex__")
+        M = read(g, "data")
+        nelt = size(M, 1) * size(M, 2)
+        data = Vector(reinterpret(ComplexF64, reshape(M, nelt)))
+    else
+        data = read(g, "data")
+    end
+    return Diag{ElT, DataT}(data)
 end
