In writing an automatic template generator I've come across an issue where constants inside a template's `combine` string cause type promotion to `Float64`. This may be intended behavior but I wanted to note the behavior's existence here as a bug report.
juliacall.JuliaError: Element type of `x` is Float64 is different from element type of `y` which is Float32.
Stacktrace:
[1] error(s::String)
@ Base .\error.jl:35
[2] _loss(::Vector{Float64}, ::Vector{Float32}, ::L2DistLoss)
@ SymbolicRegression.LossFunctionsModule C:\Users\guney\.julia\packages\SymbolicRegression\L5TJa\src\LossFunctions.jl:25
[3] _eval_loss(tree::TemplateExpression{Float32, TemplateStructure{(:f1,), (), typeof(__sr_template_8838205672071328129), @NamedTuple{f1::Int64}, @NamedTuple{}}, Node{Float32}, ComposableExpression{Float32, Node{Float32}, @NamedTuple{operators::OperatorEnum{Tuple{typeof(+), typeof(-), typeof(*), typeof(/)}, Tuple{typeof(sin), typeof(asinh)}}, variable_names::Nothing, eval_options::EvalOptions{false, false, true, Nothing}}}, @NamedTuple{f1::ComposableExpression{Float32, Node{Float32}, @NamedTuple{operators::OperatorEnum{Tuple{typeof(+), typeof(-), typeof(*), typeof(/)}, Tuple{typeof(sin), typeof(asinh)}}, variable_names::Nothing, eval_options::EvalOptions{false, false, true, Nothing}}}}, @NamedTuple{structure::TemplateStructure{(:f1,), (), typeof(__sr_template_8838205672071328129), @NamedTuple{f1::Int64}, @NamedTuple{}}, operators::OperatorEnum{Tuple{typeof(+), typeof(-), typeof(*), typeof(/)}, Tuple{typeof(sin), typeof(asinh)}}, variable_names::Nothing, parameters::@NamedTuple{}}}, dataset::SymbolicRegression.CoreModule.DatasetModule.BasicDataset{Float32, Float32, Matrix{Float32}, Vector{Float32}, Nothing, @NamedTuple{}, Nothing, Nothing, Nothing, Nothing}, options::Options{SymbolicRegression.CoreModule.OptionsStructModule.ComplexityMapping{Int64, Int64}, OperatorEnum{Tuple{typeof(+), typeof(-), typeof(*), typeof(/)}, Tuple{typeof(sin), typeof(asinh)}}, Node, TemplateExpression, @NamedTuple{structure::TemplateStructure{(:f1,), (), typeof(__sr_template_8838205672071328129), @NamedTuple{f1::Int64}, @NamedTuple{}}}, MutationWeights, false, false, nothing, Nothing, 5}, regularization::Bool)
@ SymbolicRegression.LossFunctionsModule C:\Users\guney\.julia\packages\SymbolicRegression\L5TJa\src\LossFunctions.jl:109
[4] eval_loss(tree::TemplateExpression{Float32, TemplateStructure{(:f1,), (), typeof(__sr_template_8838205672071328129), @NamedTuple{f1::Int64}, @NamedTuple{}}, Node{Float32}, ComposableExpression{Float32, Node{Float32}, @NamedTuple{operators::OperatorEnum{Tuple{typeof(+), typeof(-), typeof(*), typeof(/)}, Tuple{typeof(sin), typeof(asinh)}}, variable_names::Nothing, eval_options::EvalOptions{false, false, true, Nothing}}}, @NamedTuple{f1::ComposableExpression{Float32, Node{Float32}, @NamedTuple{operators::OperatorEnum{Tuple{typeof(+), typeof(-), typeof(*), typeof(/)}, Tuple{typeof(sin), typeof(asinh)}}, variable_names::Nothing, eval_options::EvalOptions{false, false, true, Nothing}}}}, @NamedTuple{structure::TemplateStructure{(:f1,), (), typeof(__sr_template_8838205672071328129), @NamedTuple{f1::Int64}, @NamedTuple{}}, operators::OperatorEnum{Tuple{typeof(+), typeof(-), typeof(*), typeof(/)}, Tuple{typeof(sin), typeof(asinh)}}, variable_names::Nothing, parameters::@NamedTuple{}}}, dataset::SymbolicRegression.CoreModule.DatasetModule.BasicDataset{Float32, Float32, Matrix{Float32}, Vector{Float32}, Nothing, @NamedTuple{}, Nothing, Nothing, Nothing, Nothing}, options::Options{SymbolicRegression.CoreModule.OptionsStructModule.ComplexityMapping{Int64, Int64}, OperatorEnum{Tuple{typeof(+), typeof(-), typeof(*), typeof(/)}, Tuple{typeof(sin), typeof(asinh)}}, Node, TemplateExpression, @NamedTuple{structure::TemplateStructure{(:f1,), (), typeof(__sr_template_8838205672071328129), @NamedTuple{f1::Int64}, @NamedTuple{}}}, MutationWeights, false, false, nothing, Nothing, 5}; regularization::Bool, idx::Nothing)
@ SymbolicRegression.LossFunctionsModule C:\Users\guney\.julia\packages\SymbolicRegression\L5TJa\src\LossFunctions.jl:155
[5] eval_loss
@ C:\Users\guney\.julia\packages\SymbolicRegression\L5TJa\src\LossFunctions.jl:139 [inlined]
[6] update_baseline_loss!
@ C:\Users\guney\.julia\packages\SymbolicRegression\L5TJa\src\LossFunctions.jl:225 [inlined]
[7] _validate_options(datasets::Vector{SymbolicRegression.CoreModule.DatasetModule.BasicDataset{Float32, Float32, Matrix{Float32}, Vector{Float32}, Nothing, @NamedTuple{}, Nothing, Nothing, Nothing, Nothing}}, ropt::SymbolicRegression.SearchUtilsModule.RuntimeOptions{:multithreading, 1, true, Nothing}, options::Options{SymbolicRegression.CoreModule.OptionsStructModule.ComplexityMapping{Int64, Int64}, OperatorEnum{Tuple{typeof(+), typeof(-), typeof(*), typeof(/)}, Tuple{typeof(sin), typeof(asinh)}}, Node, TemplateExpression, @NamedTuple{structure::TemplateStructure{(:f1,), (), typeof(__sr_template_8838205672071328129), @NamedTuple{f1::Int64}, @NamedTuple{}}}, MutationWeights, false, false, nothing, Nothing, 5})
@ SymbolicRegression C:\Users\guney\.julia\packages\SymbolicRegression\L5TJa\src\SymbolicRegression.jl:597
[8] _equation_search(datasets::Vector{SymbolicRegression.CoreModule.DatasetModule.BasicDataset{Float32, Float32, Matrix{Float32}, Vector{Float32}, Nothing, @NamedTuple{}, Nothing, Nothing, Nothing, Nothing}}, ropt::SymbolicRegression.SearchUtilsModule.RuntimeOptions{:multithreading, 1, true, Nothing}, options::Options{SymbolicRegression.CoreModule.OptionsStructModule.ComplexityMapping{Int64, Int64}, OperatorEnum{Tuple{typeof(+), typeof(-), typeof(*), typeof(/)}, Tuple{typeof(sin), typeof(asinh)}}, Node, TemplateExpression, @NamedTuple{structure::TemplateStructure{(:f1,), (), typeof(__sr_template_8838205672071328129), @NamedTuple{f1::Int64}, @NamedTuple{}}}, MutationWeights, false, false, nothing, Nothing, 5}, saved_state::Nothing)
@ SymbolicRegression C:\Users\guney\.julia\packages\SymbolicRegression\L5TJa\src\SymbolicRegression.jl:567
[9] equation_search(datasets::Vector{SymbolicRegression.CoreModule.DatasetModule.BasicDataset{Float32, Float32, Matrix{Float32}, Vector{Float32}, Nothing, @NamedTuple{}, Nothing, Nothing, Nothing, Nothing}}; options::Options{SymbolicRegression.CoreModule.OptionsStructModule.ComplexityMapping{Int64, Int64}, OperatorEnum{Tuple{typeof(+), typeof(-), typeof(*), typeof(/)}, Tuple{typeof(sin), typeof(asinh)}}, Node, TemplateExpression, @NamedTuple{structure::TemplateStructure{(:f1,), (), typeof(__sr_template_8838205672071328129), @NamedTuple{f1::Int64}, @NamedTuple{}}}, MutationWeights, false, false, nothing, Nothing, 5}, saved_state::Nothing, runtime_options::Nothing, runtime_options_kws::@Kwargs{niterations::Int64, parallelism::String, numprocs::Nothing, procs::Nothing, addprocs_function::Nothing, heap_size_hint_in_bytes::Nothing, worker_imports::Nothing, runtests::Bool, return_state::Bool, run_id::String, verbosity::Int64, logger::Nothing, progress::Bool, v_dim_out::Val{1}})
@ SymbolicRegression C:\Users\guney\.julia\packages\SymbolicRegression\L5TJa\src\SymbolicRegression.jl:561
[10] equation_search
@ C:\Users\guney\.julia\packages\SymbolicRegression\L5TJa\src\SymbolicRegression.jl:542 [inlined]
[11] #equation_search#23
@ C:\Users\guney\.julia\packages\SymbolicRegression\L5TJa\src\SymbolicRegression.jl:511 [inlined]
[12] equation_search
@ C:\Users\guney\.julia\packages\SymbolicRegression\L5TJa\src\SymbolicRegression.jl:456 [inlined]
[13] #equation_search#24
@ C:\Users\guney\.julia\packages\SymbolicRegression\L5TJa\src\SymbolicRegression.jl:535 [inlined]
[14] pyjlany_call(self::typeof(equation_search), args_::Py, kwargs_::Py)
@ PythonCall.JlWrap C:\Users\guney\.julia\packages\PythonCall\avYrV\src\JlWrap\any.jl:44
[15] _pyjl_callmethod(f::Any, self_::Ptr{PythonCall.C.PyObject}, args_::Ptr{PythonCall.C.PyObject}, nargs::Int64)
@ PythonCall.JlWrap C:\Users\guney\.julia\packages\PythonCall\avYrV\src\JlWrap\base.jl:73
[16] _pyjl_callmethod(o::Ptr{PythonCall.C.PyObject}, args::Ptr{PythonCall.C.PyObject})
@ PythonCall.JlWrap.Cjl C:\Users\guney\.julia\packages\PythonCall\avYrV\src\JlWrap\C.jl:63
What happened?
In writing an automatic template generator I've come across an issue where constants inside a template's
`combine` string cause type promotion to `Float64`. This may be intended behavior but I wanted to note the behavior's existence here as a bug report.

Example template:
Small script to reproduce the issue:
Possible Solution
In my case, I implemented a
`sympy.printing.str.StrPrinter` subclass to wrap any float literal with the appropriate precision. The implementation:
- wraps every float literal in a `Float{prec}` function call (for the default precision of 32 bits, `f"Float{prec}"` becomes `Float32`);
- uses `p//q` rational notation for integer constants to avoid float literals (may be redundant).

The steps above convert the example template (`2*x + 1.0 + f1(x, y)`) to `'x*Float32(2//1) + Float32(1.00000000000000) + f1(x, y)'` for the default precision of 32 bits. Below is an implementation:
Notes
My implementation requires the
`combine` string to be parsed in Python and converted to a `sympy` object before getting passed to Julia. There might be a more elegant solution on the Julia side.

Version
1.5.9
Operating System
Windows
Package Manager
pip
Interface
Script (i.e., `python my_script.py`)

Relevant log output
Extra Info
No response