Mirror of https://github.com/kalmarek/SmallHyperbolic, synced 2024-11-23 23:40:28 +01:00

Commit ed5ba406b9

README.md (13 lines changed)
@@ -5,7 +5,18 @@ The repository contains code for running experiments for
 [Marek Kaluba](https://kalmar.faculty.wmi.amu.edu.pl/) and
 [Stefan Witzel](https://www.math.uni-bielefeld.de/~switzel/).
 
-There are three disjoint computations covered in this repository.
+# Introduction
+If you arrived here after reading the article looking for groups and
+1. you don't understand any of this, you probably want to visit [this page](https://kalmarek.github.io/SmallHyperbolic/) instead;
+2. you want just the machine-readable data, then the `json` file is available [here](https://github.com/kalmarek/SmallHyperbolic/blob/master/data/triangle_groups.json) (the file was generated by calling `julia ./scripts/create_json/create_json.jl`);
+3. you want to re-run some of the computations that lead to the results, then continue reading.
+
+# Computations
+
+There are three disjoint computations covered in this repository:
+1. certified eigenvalue computations for _PSL₂(p)_,
+2. sum of squares computations in an attempt to prove property (T) by estimating spectral gap of the group Laplacian,
+3. generation of magma files used to compute e.g. witnesses for non-hyperbolicity, rank of abelianization, etc.
 
 ## Eigenvalues computations for _PSL₂(p)_
 
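Not part of the commit, but for context: once `data/triangle_groups.json` has been generated it can be consumed from Julia with nothing more than JSON.jl. A minimal sketch, assuming the repository root as the working directory (the structure of each entry is whatever `TriangleGrpSerialization` in `scripts/create_json/create_json.jl` below emits, so only generic access is shown):

```julia
# Minimal sketch: load the generated triangle-group data.
using JSON

groups = JSON.parsefile(joinpath("data", "triangle_groups.json"))
println(length(groups), " entries loaded (", typeof(groups), ")")
```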
@@ -57,21 +57,22 @@ function SL2p_gens(p::Integer)
         end
     end
 
-    return a,b
+    return a, b
 end
 
-function adjacency(ϱ, a, b; prec=256)
-    order_a = findfirst(i-> isone(a^i), 1:100)
-    order_b = findfirst(i-> isone(b^i), 1:100)
+function adjacency(ϱ, a, b; prec = 256)
+    order_a = findfirst(i -> isone(a^i), 1:100)
+    order_b = findfirst(i -> isone(b^i), 1:100)
     @assert !isnothing(order_a) && order_a > 1
     @assert !isnothing(order_b) && order_b > 1
 
-    k = order_a-1 + order_b-1
+    k = order_a - 1 + order_b - 1
 
-    A = AcbMatrix(ϱ(a), prec=prec)
-    B = AcbMatrix(ϱ(b), prec=prec)
+    A = AcbMatrix(ϱ(a), prec = prec)
+    B = AcbMatrix(ϱ(b), prec = prec)
     res = sum(A^i for i = 1:order_a-1) + sum(B^i for i = 1:order_b-1)
-    return Arblib.scalar_div!(res, res, k)
+    #return Arblib.scalar_div!(res, res, k)
+    return res
 end
 
 function parse_our_args()
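For orientation (this note is not part of the commit): reading `adjacency` above, the matrix it returns is

$$\mathrm{adj}(ϱ) = \sum_{i=1}^{\mathrm{ord}(a)-1} ϱ(a)^i \;+\; \sum_{i=1}^{\mathrm{ord}(b)-1} ϱ(b)^i, \qquad k = \mathrm{ord}(a) + \mathrm{ord}(b) - 2,$$

evaluated in Arb ball arithmetic at the requested precision. The change comments out `Arblib.scalar_div!(res, res, k)`, so the function now returns this un-normalized sum rather than the sum divided by k; the eigenvalues reported downstream are therefore those of the un-normalized operator.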
@@ -113,7 +114,8 @@ end
 parsed_args = parse_our_args()
 
 const p = let p = parsed_args["p"]
-    isprime(p) || @error "You need to provide a prime, ex: `julia adj_psl2_eigvals.jl -p 31`"
+    isprime(p) ||
+        @error "You need to provide a prime, ex: `julia adj_psl2_eigvals.jl -p 31`"
     p
 end
 
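As the error message in this hunk indicates, the script takes the prime as a command-line argument (the remaining optional arguments such as `"a"` and `"b"` are handled by `parse_our_args`, which lies outside this diff), e.g.:

```
julia adj_psl2_eigvals.jl -p 31
```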
@@ -124,14 +126,14 @@ open(LOGFILE, "w") do io
     @info "Logging into $LOGFILE"
     with_logger(SimpleLogger(io)) do
 
-        @info "Arguments:" args=parsed_args
+        @info "Arguments:" args = parsed_args
 
-        a,b = SL2p_gens(p)
+        a, b = SL2p_gens(p)
         a = SL₂{p}(get(parsed_args, "a", a))
         b = SL₂{p}(get(parsed_args, "b", b))
         @info "Generators" a b
 
-        Borel_cosets = let p = p, (a,b) = (a,b)
+        Borel_cosets = let p = p, (a, b) = (a, b)
             SL2p, sizes =
                 RamanujanGraphs.generate_balls([a, b, inv(a), inv(b)], radius = 21)
             @assert sizes[end] == RamanujanGraphs.order(SL₂{p})
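A side note on the `@assert` (not part of the diff): it checks that the ball of radius 21 in the word metric on `[a, b, inv(a), inv(b)]` already exhausts the whole group; for reference,

$$|\mathrm{SL}_2(\mathbb{F}_p)| = p(p^2 - 1).$$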
@@ -143,11 +145,11 @@ open(LOGFILE, "w") do io
 
         for j = 0:(p-1)÷4
             h = PrincipalRepr(
-                α => unit_root((p - 1) ÷ 2, j, prec=PRECISION),
+                α => unit_root((p - 1) ÷ 2, j, prec = PRECISION),
                 Borel_cosets,
             )
 
-            @time adj = adjacency(h, a, b, prec=PRECISION)
+            @time adj = adjacency(h, a, b, prec = PRECISION)
 
             try
                 @time evs = let evs = safe_eigvals(adj)
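Inferred reading of the loop above (hedged: `unit_root` is a helper from RamanujanGraphs whose exact semantics are not shown in this diff): `unit_root(n, j, prec = PRECISION)` presumably returns an Arb ball enclosing $e^{2\pi i j/n}$, so `PrincipalRepr` is fed the characters

$$α \mapsto ζ_j = \exp\!\left(\frac{2\pi i\, j}{(p-1)/2}\right), \qquad j = 0, \ldots, \lfloor (p-1)/4 \rfloor,$$

i.e. characters of the split torus taken up to inversion, each giving one principal series representation.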
@@ -168,20 +170,20 @@ open(LOGFILE, "w") do io
 
         if p % 4 == 1
             ub = (p - 1) ÷ 4
-            ζ = unit_root((p + 1) ÷ 2, 1, prec=PRECISION)
+            ζ = unit_root((p + 1) ÷ 2, 1, prec = PRECISION)
         else # p % 4 == 3
             ub = (p + 1) ÷ 4
-            ζ = unit_root((p + 1), 1, prec=PRECISION)
+            ζ = unit_root((p + 1), 1, prec = PRECISION)
         end
 
         for k = 1:ub
 
             h = DiscreteRepr(
-                RamanujanGraphs.GF{p}(1) => unit_root(p, prec=PRECISION),
+                RamanujanGraphs.GF{p}(1) => unit_root(p, prec = PRECISION),
                 β => ζ^k,
             )
 
-            @time adj = adjacency(h, a, b, prec=PRECISION)
+            @time adj = adjacency(h, a, b, prec = PRECISION)
 
             try
                 @time evs = let evs = safe_eigvals(adj)
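The discrete series loop mirrors the principal series one (again an inferred reading, not stated in the diff): ζ is a root of unity of order $(p+1)/2$ when $p \equiv 1 \pmod 4$ and of order $p+1$ when $p \equiv 3 \pmod 4$, `GF{p}(1) => unit_root(p, ...)` fixes a nontrivial additive character, and `β => ζ^k` for $k = 1, \ldots, \mathrm{ub}$ runs over the multiplicative characters of the non-split torus up to inversion. For scale, principal series representations of $\mathrm{SL}_2(\mathbb{F}_p)$ generically have dimension $p+1$ and discrete series ones dimension $p-1$, so the dense Acb matrices being diagonalized are of size on the order of $p$.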
@@ -196,11 +198,11 @@ open(LOGFILE, "w") do io
                 end
             end
         end
 
-        all_large_evs = sort(all_large_evs, rev=true)
+        all_large_evs = sort(all_large_evs, rev = true)
         λ = all_large_evs[2]
-        ε = (λ - 3)/5
+        ε = (λ - 3) / 5
         α = acos(ε)
-        α_deg = (α/pi)*180
+        α_deg = (α / pi) * 180
         @info "Certified values:" λ ε α α_deg
     end # with_logger
 end # open(logfile)
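Reading directly off the final block, with λ the second-largest of the collected certified eigenvalues, the logged quantities are

$$\varepsilon = \frac{\lambda - 3}{5}, \qquad \alpha = \arccos(\varepsilon), \qquad \alpha_{\deg} = \frac{180\,\alpha}{\pi}.$$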
@@ -1,45 +0,0 @@
-using Pkg
-Pkg.activate(@__DIR__)
-using DelimitedFiles
-using JSON
-
-include(joinpath(@__DIR__, "parse_presentations.jl"))
-include(joinpath(@__DIR__, "smallhyperbolicgrp.jl"))
-
-all_grps_presentations =
-    let tables = [
-            joinpath(@__DIR__, f) for f in readdir(@__DIR__) if
-            isfile(joinpath(@__DIR__, f)) && endswith(f, ".txt")
-        ]
-        mapreduce(parse_grouppresentations_abstract, union, tables) |> Dict
-    end
-
-tr_grps =
-    let csvs = [
-            joinpath(@__DIR__, f) for f in readdir(@__DIR__) if
-            isfile(joinpath(@__DIR__, f)) && endswith(f, ".csv")
-        ]
-
-        trGrps = mapreduce(union, csvs) do file
-            m = match(r".*_(\d)_(\d)_(\d).csv", basename(file))
-            @assert !isnothing(m)
-            type = parse.(Int, tuple(m[1], m[2], m[3]))
-
-            data = readdlm(file, '&')
-            labels = Symbol.(replace.(strip.(data[1, :]), ' ' => '_', '-' => '_'))
-            groups = data[2:end, :]
-            grps = map(enumerate(eachrow(groups))) do (i, props)
-                nt = (; (Symbol(l) => v for (l, v) in zip(labels, props))...)
-                @debug i, grp_name(nt)
-                P = all_grps_presentations[grp_name(nt)]
-                grp = TriangleGrp(type, P.generators, P.relations, nt)
-            end
-        end
-    end
-
-open(joinpath(@__DIR__, "triangle_groups.json"), "w") do io
-    f(args...) = show_json(args...; indent = 4)
-    s = sprint(f, TriangleGrpSerialization(), tr_grps)
-    # JSON.print(io, , 4)
-    print(io, s)
-end
@@ -1,16 +0,0 @@
-#!/usr/bin/env python3
-# encoding: utf-8
-"""Use instead of `python3 -m http.server` when you need CORS"""
-
-from http.server import HTTPServer, SimpleHTTPRequestHandler
-
-class CORSRequestHandler(SimpleHTTPRequestHandler):
-    def end_headers(self):
-        self.send_header('Access-Control-Allow-Origin', '*')
-        self.send_header('Access-Control-Allow-Methods', 'GET')
-        self.send_header('Cache-Control', 'no-store, no-cache, must-revalidate')
-        return super(CORSRequestHandler, self).end_headers()
-
-
-httpd = HTTPServer(('localhost', 8003), CORSRequestHandler)
-httpd.serve_forever()
scripts/create_json/create_json.jl (new file, 46 lines)
@@ -0,0 +1,46 @@
+using Pkg
+Pkg.activate(@__DIR__)
+using DelimitedFiles
+using JSON
+
+include(joinpath(@__DIR__, "parse_presentations.jl"))
+include(joinpath(@__DIR__, "smallhyperbolicgrp.jl"))
+
+const DATA_DIR = joinpath(@__DIR__, "..", "..", "data")
+
+function _files_with_extension(dir::AbstractString, ext::AbstractString)
+    return [
+        joinpath(dir, f) for
+        f in readdir(dir) if isfile(joinpath(dir, f)) && endswith(f, '.' * ext)
+    ]
+end
+
+all_grps_presentations = let tables = _files_with_extension(DATA_DIR, "txt")
+    mapreduce(parse_grouppresentations_abstract, union, tables) |> Dict
+end
+
+grps = let csvs = _files_with_extension(DATA_DIR, "csv")
+
+    trGrps = mapreduce(union, csvs) do file
+        m = match(r".*_(\d)_(\d)_(\d).csv", basename(file))
+        @assert !isnothing(m)
+        type = parse.(Int, tuple(m[1], m[2], m[3]))
+
+        data = readdlm(file, '&')
+        labels = Symbol.(replace.(strip.(data[1, :]), ' ' => '_', '-' => '_'))
+        groups = data[2:end, :]
+        grps = map(enumerate(eachrow(groups))) do (i, props)
+            nt = (; (Symbol(l) => v for (l, v) in zip(labels, props))...)
+            # @debug i, grp_name(nt)
+            P = all_grps_presentations[grp_name(nt)]
+            grp = TriangleGrp(type, P.generators, P.relations, nt)
+        end
+    end
+end
+
+open(joinpath(DATA_DIR, "triangle_groups.json"), "w") do io
+    f(args...) = show_json(args...; indent = 4)
+    s = sprint(f, TriangleGrpSerialization(), grps)
+    # JSON.print(io, , 4)
+    print(io, s)
+end
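Per the README change above, the data file is regenerated with `julia ./scripts/create_json/create_json.jl`. As an aside, the NamedTuple construction in the inner loop can be illustrated in isolation; the labels and values below are made up for the example (in the script they come from the CSV header row and a data row):

```julia
# Hypothetical header labels and one data row, only to show the (; pairs...) splat.
labels = [:group, :order_of_abelianization]
props = ["G_355_1", 0]

nt = (; (Symbol(l) => v for (l, v) in zip(labels, props))...)
# nt == (group = "G_355_1", order_of_abelianization = 0)
```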
@@ -1,4 +1,4 @@
-include("../src/groupparse.jl")
+include(joinpath(@__DIR__, "..", "..", "src", "groupparse.jl"))
 
 function parse_grouppresentations_abstract(filename::AbstractString)
     lines = strip.(readlines(filename))