JuliaFormatter.jl
Formatting issues and changes between v1 and v2
Just pasting the diffs of some of my packages to help identify new formatting issues. These overlap with #878 and #879. I’m using BlueStyle.
As you can see, many of these are superficial changes (though some may be violating Blue, not sure). However, some of these formatting changes are semantic differences, like #878.
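For example, one repeated v2 change in the diff below moves a comma to a semicolon in method signatures, which silently turns an optional positional argument into a keyword-only one. A minimal sketch of why that breaks callers (hypothetical check standing in for e.g. check_constraints):

# v1 kept cursize as an optional *positional* argument:
check(tree, options, maxsize, cursize::Union{Int,Nothing}=nothing) = cursize
check(:t, :o, 20, 5)   # works, returns 5

# v2 emits maxsize::Int; instead of maxsize::Int, (note the semicolon),
# making cursize keyword-only:
#   check(tree, options, maxsize; cursize::Union{Int,Nothing}=nothing) = cursize
# after which check(:t, :o, 20, 5) is a MethodError and callers must write
#   check(:t, :o, 20; cursize=5)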
Here's the diff for SymbolicRegression.jl
diff --git a/benchmark/benchmarks.jl b/benchmark/benchmarks.jl
index 9e486234..07ecd1c2 100644
--- a/benchmark/benchmarks.jl
+++ b/benchmark/benchmarks.jl
@@ -88,10 +88,10 @@ function create_utils_benchmark()
suite["best_of_sample"] = @benchmarkable(
best_of_sample(pop, rss, $options),
setup = (
- nfeatures = 1;
- dataset = Dataset(randn(nfeatures, 32), randn(32));
- pop = Population(dataset; npop=100, nlength=20, options=$options, nfeatures);
- rss = RunningSearchStatistics(; options=$options)
+ nfeatures=1;
+ dataset=Dataset(randn(nfeatures, 32), randn(32));
+ pop=Population(dataset; npop=100, nlength=20, options=($options), nfeatures);
+ rss=RunningSearchStatistics(; options=($options))
)
)
@@ -110,9 +110,9 @@ function create_utils_benchmark()
end
end,
setup = (
- nfeatures = 1;
- dataset = Dataset(randn(nfeatures, 32), randn(32));
- mutation_weights = MutationWeights(;
+ nfeatures=1;
+ dataset=Dataset(randn(nfeatures, 32), randn(32));
+ mutation_weights=MutationWeights(;
mutate_constant=1.0,
mutate_operator=1.0,
swap_operands=1.0,
@@ -125,21 +125,23 @@ function create_utils_benchmark()
form_connection=0.0,
break_connection=0.0,
);
- options = Options(;
- unary_operators=[sin, cos], binary_operators=[+, -, *, /], mutation_weights
+ options=Options(;
+ unary_operators=[sin, cos],
+ binary_operators=[+, -, *, /],
+ mutation_weights,
);
- recorder = RecordType();
- temperature = 1.0;
- curmaxsize = 20;
- rss = RunningSearchStatistics(; options);
- trees = [
+ recorder=RecordType();
+ temperature=1.0;
+ curmaxsize=20;
+ rss=RunningSearchStatistics(; options);
+ trees=[
gen_random_tree_fixed_size(15, options, nfeatures, Float64) for _ in 1:100
];
- expressions = [
+ expressions=[
Expression(tree; operators=options.operators, variable_names=["x1"]) for
tree in trees
];
- members = [
+ members=[
PopMember(dataset, expression, options; deterministic=false) for
expression in expressions
]
@@ -149,18 +151,18 @@ function create_utils_benchmark()
ntrees = 10
suite["optimize_constants_x10"] = @benchmarkable(
foreach(members) do member
- optimize_constants(dataset, member, $options)
+ return optimize_constants(dataset, member, $options)
end,
seconds = 20,
setup = (
- nfeatures = 1;
- T = Float64;
- dataset = Dataset(randn(nfeatures, 512), randn(512));
- ntrees = $ntrees;
- trees = [
+ nfeatures=1;
+ T=Float64;
+ dataset=Dataset(randn(nfeatures, 512), randn(512));
+ ntrees=($ntrees);
+ trees=[
gen_random_tree_fixed_size(20, $options, nfeatures, T) for i in 1:ntrees
];
- members = [
+ members=[
PopMember(dataset, tree, $options; deterministic=false) for tree in trees
]
)
@@ -176,12 +178,12 @@ function create_utils_benchmark()
)
s[T] = @benchmarkable(
foreach(trees) do tree
- compute_complexity(tree, $options)
+ return compute_complexity(tree, $options)
end,
setup = (
- T = Float64;
- nfeatures = 3;
- trees = [
+ T=Float64;
+ nfeatures=3;
+ trees=[
gen_random_tree_fixed_size(20, $options, nfeatures, T) for
i in 1:($ntrees)
]
@@ -194,12 +196,14 @@ function create_utils_benchmark()
if isdefined(SymbolicRegression.MutationFunctionsModule, :randomly_rotate_tree!)
suite["randomly_rotate_tree_x10"] = @benchmarkable(
foreach(trees) do tree
- SymbolicRegression.MutationFunctionsModule.randomly_rotate_tree!(tree)
+ return SymbolicRegression.MutationFunctionsModule.randomly_rotate_tree!(
+ tree
+ )
end,
setup = (
- T = Float64;
- nfeatures = 3;
- trees = [
+ T=Float64;
+ nfeatures=3;
+ trees=[
gen_random_tree_fixed_size(20, $options, nfeatures, T) for
i in 1:($ntrees)
]
@@ -209,14 +213,14 @@ function create_utils_benchmark()
suite["insert_random_op_x10"] = @benchmarkable(
foreach(trees) do tree
- SymbolicRegression.MutationFunctionsModule.insert_random_op(
+ return SymbolicRegression.MutationFunctionsModule.insert_random_op(
tree, $options, nfeatures
)
end,
setup = (
- T = Float64;
- nfeatures = 3;
- trees = [
+ T=Float64;
+ nfeatures=3;
+ trees=[
gen_random_tree_fixed_size(20, $options, nfeatures, T) for i in 1:($ntrees)
]
)
@@ -237,12 +241,12 @@ function create_utils_benchmark()
)
suite["check_constraints_x10"] = @benchmarkable(
foreach(trees) do tree
- check_constraints(tree, $options, $options.maxsize)
+ return check_constraints(tree, $options, $options.maxsize)
end,
setup = (
- T = Float64;
- nfeatures = 3;
- trees = [
+ T=Float64;
+ nfeatures=3;
+ trees=[
gen_random_tree_fixed_size(20, $options, nfeatures, T) for i in 1:($ntrees)
]
)
diff --git a/ext/SymbolicRegressionEnzymeExt.jl b/ext/SymbolicRegressionEnzymeExt.jl
index b8b8be60..9929567f 100644
--- a/ext/SymbolicRegressionEnzymeExt.jl
+++ b/ext/SymbolicRegressionEnzymeExt.jl
@@ -39,7 +39,7 @@ function (g::GradEvaluator{<:Any,<:AutoEnzyme})(_, G, x::AbstractVector{T}) wher
doutput = [one(T)]
with_stacksize(32 * 1024 * 1024) do
- autodiff(
+ return autodiff(
Reverse,
evaluator,
Duplicated(g.f.tree, g.extra.storage_tree),
diff --git a/ext/SymbolicRegressionJSON3Ext.jl b/ext/SymbolicRegressionJSON3Ext.jl
index 1384f690..a78cd2f0 100644
--- a/ext/SymbolicRegressionJSON3Ext.jl
+++ b/ext/SymbolicRegressionJSON3Ext.jl
@@ -5,7 +5,7 @@ import SymbolicRegression.UtilsModule: json3_write
function json3_write(record, recorder_file)
open(recorder_file, "w") do io
- JSON3.write(io, record; allow_inf=true)
+ return JSON3.write(io, record; allow_inf=true)
end
end
diff --git a/src/CheckConstraints.jl b/src/CheckConstraints.jl
index fb0bbb71..7c95ff7e 100644
--- a/src/CheckConstraints.jl
+++ b/src/CheckConstraints.jl
@@ -73,7 +73,7 @@ end
function check_constraints(
ex::AbstractExpression,
options::AbstractOptions,
- maxsize::Int,
+ maxsize::Int;
cursize::Union{Int,Nothing}=nothing,
)::Bool
tree = get_tree(ex)
@@ -82,7 +82,7 @@ end
function check_constraints(
tree::AbstractExpressionNode,
options::AbstractOptions,
- maxsize::Int,
+ maxsize::Int;
cursize::Union{Int,Nothing}=nothing,
)::Bool
((cursize === nothing) ? compute_complexity(tree, options) : cursize) > maxsize &&
@@ -102,8 +102,8 @@ function check_constraints(
return true
end
-check_constraints(
- ex::Union{AbstractExpression,AbstractExpressionNode}, options::AbstractOptions
-)::Bool = check_constraints(ex, options, options.maxsize)
+check_constraints(ex::Union{AbstractExpression,AbstractExpressionNode}, options::AbstractOptions)::Bool = check_constraints(
+ ex, options, options.maxsize
+)
end
diff --git a/src/Configure.jl b/src/Configure.jl
index 2b184e5c..9207eee1 100644
--- a/src/Configure.jl
+++ b/src/Configure.jl
@@ -1,6 +1,6 @@
const TEST_TYPE = Float32
-function test_operator(op::F, x::T, y=nothing) where {F,T}
+function test_operator(op::F, x::T; y=nothing) where {F,T}
local output
try
output = y === nothing ? op(x) : op(x, y)
@@ -196,12 +196,10 @@ function activate_env_on_workers(
)
verbosity > 0 && @info "Activating environment on workers."
@everywhere procs begin
- Base.MainInclude.eval(
- quote
- using Pkg
- Pkg.activate($$project_path)
- end,
- )
+ Base.MainInclude.eval(quote
+ using Pkg
+ Pkg.activate($$project_path)
+ end)
end
end
@@ -237,12 +235,9 @@ function import_module_on_workers(
# to JuliaLang.
for ext in relevant_extensions
- push!(
- expr.args,
- quote
- using $ext: $ext
- end,
- )
+ push!(expr.args, quote
+ using $ext: $ext
+ end)
end
verbosity > 0 && if isempty(relevant_extensions)
diff --git a/src/ConstantOptimization.jl b/src/ConstantOptimization.jl
index 92b5d0c5..2cd1fa46 100644
--- a/src/ConstantOptimization.jl
+++ b/src/ConstantOptimization.jl
@@ -125,7 +125,7 @@ function (g::GradEvaluator{<:Any,AD})(_, G, x::AbstractVector) where {AD}
AD isa AutoEnzyme && error("Please load the `Enzyme.jl` package.")
set_scalar_constants!(g.f.tree, x, g.f.refs)
(val, grad) = value_and_gradient(g.backend, g.f.tree) do tree
- eval_loss(tree, g.f.dataset, g.f.options; regularization=false, idx=g.f.idx)
+ return eval_loss(tree, g.f.dataset, g.f.options; regularization=false, idx=g.f.idx)
end
if G !== nothing && grad !== nothing
G .= extract_gradient(grad, g.f.tree)
diff --git a/src/Dataset.jl b/src/Dataset.jl
index f9e28bcc..64c2ad89 100644
--- a/src/Dataset.jl
+++ b/src/Dataset.jl
@@ -90,9 +90,9 @@ end
Construct a dataset to pass between internal functions.
"""
function Dataset(
- X::AbstractMatrix{T},
+ X::AbstractMatrix{T};
y::Union{AbstractVector,Nothing}=nothing,
- loss_type::Type{L}=Nothing;
+ loss_type::Type{L}=Nothing,
index::Int=1,
weights::Union{AbstractVector,Nothing}=nothing,
variable_names::Union{Array{String,1},Nothing}=nothing,
diff --git a/src/ExpressionBuilder.jl b/src/ExpressionBuilder.jl
index 709937ec..b3bbaaa2 100644
--- a/src/ExpressionBuilder.jl
+++ b/src/ExpressionBuilder.jl
@@ -27,7 +27,7 @@ import DynamicExpressions: get_operators
import ..CoreModule: create_expression
@unstable function create_expression(
- t::T, options::AbstractOptions, dataset::Dataset{T,L}, ::Val{embed}=Val(false)
+ t::T, options::AbstractOptions, dataset::Dataset{T,L}, (::Val{embed})=Val(false)
) where {T,L,embed}
return create_expression(
t, options, dataset, options.node_type, options.expression_type, Val(embed)
@@ -37,7 +37,7 @@ end
t::AbstractExpressionNode{T},
options::AbstractOptions,
dataset::Dataset{T,L},
- ::Val{embed}=Val(false),
+ (::Val{embed})=Val(false),
) where {T,L,embed}
return create_expression(
t, options, dataset, options.node_type, options.expression_type, Val(embed)
@@ -46,8 +46,8 @@ end
function create_expression(
ex::AbstractExpression{T},
options::AbstractOptions,
- ::Dataset{T,L},
- ::Val{embed}=Val(false),
+ ::Dataset{T,L};
+ (::Val{embed})=Val(false),
) where {T,L,embed}
return ex::options.expression_type
end
@@ -57,7 +57,7 @@ end
dataset::Dataset{T,L},
::Type{N},
::Type{E},
- ::Val{embed}=Val(false),
+ (::Val{embed})=Val(false),
) where {T,L,embed,N<:AbstractExpressionNode,E<:AbstractExpression}
return create_expression(constructorof(N)(; val=t), options, dataset, N, E, Val(embed))
end
@@ -67,7 +67,7 @@ end
dataset::Dataset{T,L},
::Type{<:AbstractExpressionNode},
::Type{E},
- ::Val{embed}=Val(false),
+ (::Val{embed})=Val(false),
) where {T,L,embed,E<:AbstractExpression}
return constructorof(E)(t; init_params(options, dataset, nothing, Val(embed))...)
end
diff --git a/src/HallOfFame.jl b/src/HallOfFame.jl
index a75b8293..91e27d80 100644
--- a/src/HallOfFame.jl
+++ b/src/HallOfFame.jl
@@ -102,7 +102,7 @@ function calculate_pareto_frontier(hallOfFame::HallOfFame{T,L,N}) where {T,L,N}
# We check if this member is better than all members which are smaller than it and
# also exist.
betterThanAllSmaller = true
- for i in 1:(size - 1)
+ for i in 1:(size-1)
if !hallOfFame.exists[i]
continue
end
diff --git a/src/LossFunctions.jl b/src/LossFunctions.jl
index 01dcca86..cfe47f47 100644
--- a/src/LossFunctions.jl
+++ b/src/LossFunctions.jl
@@ -162,7 +162,7 @@ function loss_to_score(
use_baseline::Bool,
baseline::L,
member,
- options::AbstractOptions,
+ options::AbstractOptions;
complexity::Union{Int,Nothing}=nothing,
)::L where {L<:LOSS_TYPE}
# TODO: Come up with a more general normalization scheme.
diff --git a/src/MLJInterface.jl b/src/MLJInterface.jl
index 4d7a1d14..8400f6d4 100644
--- a/src/MLJInterface.jl
+++ b/src/MLJInterface.jl
@@ -217,7 +217,7 @@ function _update(m, verbosity, old_fitresult, old_cache, X, y, w, options, class
variable_names=variable_names,
y_variable_names=y_variable_names,
y_is_table=MMI.istable(y),
- has_classes=!isnothing(classes),
+ has_classes=(!isnothing(classes)),
X_units=X_units_clean,
y_units=y_units_clean,
types=(
diff --git a/src/Mutate.jl b/src/Mutate.jl
index 7b828f6f..e61776fc 100644
--- a/src/Mutate.jl
+++ b/src/Mutate.jl
@@ -349,7 +349,7 @@ end
kws...,
) where {W<:AbstractMutationWeights}
mutation_choices = fieldnames(W)
- quote
+ return quote
Base.Cartesian.@nif(
$(length(mutation_choices)),
i -> mutation_choice == $(mutation_choices)[i],
diff --git a/src/MutationFunctions.jl b/src/MutationFunctions.jl
index 73e0367b..007b0738 100644
--- a/src/MutationFunctions.jl
+++ b/src/MutationFunctions.jl
@@ -47,7 +47,7 @@ Return a random node from the tree. You may optionally
filter the nodes matching some condition before sampling.
"""
function random_node(
- tree::AbstractNode, rng::AbstractRNG=default_rng(); filter::F=Returns(true)
+ tree::AbstractNode; rng::AbstractRNG=default_rng(), filter::F=Returns(true)
) where {F<:Function}
Base.depwarn(
"Instead of `random_node(tree, filter)`, use `rand(NodeSampler(; tree, filter))`",
@@ -73,7 +73,7 @@ end
"""Randomly convert an operator into another one (binary->binary; unary->unary)"""
function mutate_operator(
- ex::AbstractExpression{T}, options::AbstractOptions, rng::AbstractRNG=default_rng()
+ ex::AbstractExpression{T}, options::AbstractOptions; rng::AbstractRNG=default_rng()
) where {T<:DATA_TYPE}
tree, context = get_contents_for_mutation(ex, rng)
ex = with_contents_for_mutation(ex, mutate_operator(tree, options, rng), context)
@@ -81,7 +81,7 @@ function mutate_operator(
end
function mutate_operator(
tree::AbstractExpressionNode{T},
- options::AbstractOptions,
+ options::AbstractOptions;
rng::AbstractRNG=default_rng(),
) where {T}
if !(has_operators(tree))
@@ -100,7 +100,7 @@ end
function mutate_constant(
ex::AbstractExpression{T},
temperature,
- options::AbstractOptions,
+ options::AbstractOptions;
rng::AbstractRNG=default_rng(),
) where {T<:DATA_TYPE}
tree, context = get_contents_for_mutation(ex, rng)
@@ -112,7 +112,7 @@ end
function mutate_constant(
tree::AbstractExpressionNode{T},
temperature,
- options::AbstractOptions,
+ options::AbstractOptions;
rng::AbstractRNG=default_rng(),
) where {T<:DATA_TYPE}
# T is between 0 and 1.
@@ -147,8 +147,8 @@ end
function append_random_op(
ex::AbstractExpression{T},
options::AbstractOptions,
- nfeatures::Int,
- rng::AbstractRNG=default_rng();
+ nfeatures::Int;
+ rng::AbstractRNG=default_rng(),
makeNewBinOp::Union{Bool,Nothing}=nothing,
) where {T<:DATA_TYPE}
tree, context = get_contents_for_mutation(ex, rng)
@@ -160,8 +160,8 @@ end
function append_random_op(
tree::AbstractExpressionNode{T},
options::AbstractOptions,
- nfeatures::Int,
- rng::AbstractRNG=default_rng();
+ nfeatures::Int;
+ rng::AbstractRNG=default_rng(),
makeNewBinOp::Union{Bool,Nothing}=nothing,
) where {T<:DATA_TYPE}
node = rand(rng, NodeSampler(; tree, filter=t -> t.degree == 0))
@@ -193,7 +193,7 @@ end
function insert_random_op(
ex::AbstractExpression{T},
options::AbstractOptions,
- nfeatures::Int,
+ nfeatures::Int;
rng::AbstractRNG=default_rng(),
) where {T<:DATA_TYPE}
tree, context = get_contents_for_mutation(ex, rng)
@@ -205,7 +205,7 @@ end
function insert_random_op(
tree::AbstractExpressionNode{T},
options::AbstractOptions,
- nfeatures::Int,
+ nfeatures::Int;
rng::AbstractRNG=default_rng(),
) where {T<:DATA_TYPE}
node = rand(rng, NodeSampler(; tree))
@@ -229,7 +229,7 @@ end
function prepend_random_op(
ex::AbstractExpression{T},
options::AbstractOptions,
- nfeatures::Int,
+ nfeatures::Int;
rng::AbstractRNG=default_rng(),
) where {T<:DATA_TYPE}
tree, context = get_contents_for_mutation(ex, rng)
@@ -241,7 +241,7 @@ end
function prepend_random_op(
tree::AbstractExpressionNode{T},
options::AbstractOptions,
- nfeatures::Int,
+ nfeatures::Int;
rng::AbstractRNG=default_rng(),
) where {T<:DATA_TYPE}
node = tree
@@ -264,9 +264,9 @@ end
function make_random_leaf(
nfeatures::Int,
::Type{T},
- ::Type{N},
+ ::Type{N};
rng::AbstractRNG=default_rng(),
- ::Union{AbstractOptions,Nothing}=nothing,
+ (::Union{AbstractOptions,Nothing})=nothing,
) where {T<:DATA_TYPE,N<:AbstractExpressionNode}
if rand(rng, Bool)
return constructorof(N)(T; val=randn(rng, T))
@@ -292,7 +292,7 @@ end
function delete_random_op!(
ex::AbstractExpression{T},
options::AbstractOptions,
- nfeatures::Int,
+ nfeatures::Int;
rng::AbstractRNG=default_rng(),
) where {T<:DATA_TYPE}
tree, context = get_contents_for_mutation(ex, rng)
@@ -304,7 +304,7 @@ end
function delete_random_op!(
tree::AbstractExpressionNode{T},
options::AbstractOptions,
- nfeatures::Int,
+ nfeatures::Int;
rng::AbstractRNG=default_rng(),
) where {T<:DATA_TYPE}
node, parent, side = random_node_and_parent(tree, rng)
@@ -363,7 +363,7 @@ function randomize_tree(
::AbstractExpressionNode{T},
curmaxsize::Int,
options::AbstractOptions,
- nfeatures::Int,
+ nfeatures::Int;
rng::AbstractRNG=default_rng(),
) where {T<:DATA_TYPE}
tree_size_to_generate = rand(rng, 1:curmaxsize)
@@ -375,7 +375,7 @@ function gen_random_tree(
length::Int,
options::AbstractOptions,
nfeatures::Int,
- ::Type{T},
+ ::Type{T};
rng::AbstractRNG=default_rng(),
) where {T<:DATA_TYPE}
# Note that this base tree is just a placeholder; it will be replaced.
@@ -391,7 +391,7 @@ function gen_random_tree_fixed_size(
node_count::Int,
options::AbstractOptions,
nfeatures::Int,
- ::Type{T},
+ ::Type{T};
rng::AbstractRNG=default_rng(),
) where {T<:DATA_TYPE}
tree = make_random_leaf(nfeatures, T, options.node_type, rng, options)
@@ -409,7 +409,7 @@ function gen_random_tree_fixed_size(
end
function crossover_trees(
- ex1::E, ex2::E, rng::AbstractRNG=default_rng()
+ ex1::E, ex2::E; rng::AbstractRNG=default_rng()
) where {T,E<:AbstractExpression{T}}
if ex1 === ex2
error("Attempted to crossover the same expression!")
@@ -424,7 +424,7 @@ end
"""Crossover between two expressions"""
function crossover_trees(
- tree1::N, tree2::N, rng::AbstractRNG=default_rng()
+ tree1::N, tree2::N; rng::AbstractRNG=default_rng()
) where {T,N<:AbstractExpressionNode{T}}
if tree1 === tree2
error("Attempted to crossover the same tree!")
diff --git a/src/OptionsStruct.jl b/src/OptionsStruct.jl
index fa8a0035..c002a775 100644
--- a/src/OptionsStruct.jl
+++ b/src/OptionsStruct.jl
@@ -118,9 +118,11 @@ Controls level of specialization we compile into `Options`.
Overload if needed for custom expression types.
"""
-operator_specialization(
+function operator_specialization(
::Type{O}, ::Type{<:AbstractExpression}
-) where {O<:AbstractOperatorEnum} = O
+) where {O<:AbstractOperatorEnum}
+ return O
+end
@unstable operator_specialization(::Type{<:OperatorEnum}, ::Type{<:AbstractExpression}) =
OperatorEnum
@@ -288,7 +290,7 @@ end
# Return an options struct with concrete operators
type_parameters = O.parameters
fields = Any[:(getfield(options, $(QuoteNode(k)))) for k in fieldnames(O)]
- quote
+ return quote
operators = getfield(options, :operators)
Options{$(type_parameters[1]),typeof(operators),$(type_parameters[3:end]...)}(
$(fields...)
diff --git a/src/ParametricExpression.jl b/src/ParametricExpression.jl
index f98a1de0..5c9958ff 100644
--- a/src/ParametricExpression.jl
+++ b/src/ParametricExpression.jl
@@ -107,7 +107,7 @@ end
function MF.make_random_leaf(
nfeatures::Int,
::Type{T},
- ::Type{N},
+ ::Type{N};
rng::AbstractRNG=default_rng(),
options::Union{AbstractOptions,Nothing}=nothing,
) where {T<:DATA_TYPE,N<:ParametricNode}
@@ -132,7 +132,7 @@ end
function MF.crossover_trees(
ex1::ParametricExpression{T},
- ex2::ParametricExpression{T},
+ ex2::ParametricExpression{T};
rng::AbstractRNG=default_rng(),
) where {T}
tree1 = get_contents(ex1)
@@ -167,7 +167,7 @@ end
function MF.mutate_constant(
ex::ParametricExpression{T},
temperature,
- options::AbstractOptions,
+ options::AbstractOptions;
rng::AbstractRNG=default_rng(),
) where {T<:DATA_TYPE}
if rand(rng, Bool)
diff --git a/src/PopMember.jl b/src/PopMember.jl
index 63cfebfb..2f42ab48 100644
--- a/src/PopMember.jl
+++ b/src/PopMember.jl
@@ -60,9 +60,9 @@ and loss.
function PopMember(
t::AbstractExpression{T},
score::L,
- loss::L,
+ loss::L;
options::Union{AbstractOptions,Nothing}=nothing,
- complexity::Union{Int,Nothing}=nothing;
+ complexity::Union{Int,Nothing}=nothing,
ref::Int=-1,
parent::Int=-1,
deterministic=nothing,
@@ -108,8 +108,8 @@ Automatically compute the score for this tree.
function PopMember(
dataset::Dataset{T,L},
tree::Union{AbstractExpressionNode{T},AbstractExpression{T}},
- options::AbstractOptions,
- complexity::Union{Int,Nothing}=nothing;
+ options::AbstractOptions;
+ complexity::Union{Int,Nothing}=nothing,
ref::Int=-1,
parent::Int=-1,
deterministic=nothing,
diff --git a/src/Population.jl b/src/Population.jl
index 54aabd36..b3c19baa 100644
--- a/src/Population.jl
+++ b/src/Population.jl
@@ -173,7 +173,7 @@ const CACHED_WEIGHTS =
p = options.tournament_selection_p
# Computing the weights for the tournament becomes quite expensive,
return get!(CACHED_WEIGHTS, (n, p)) do
- k = collect(0:(n - 1))
+ k = collect(0:(n-1))
prob_each = p * ((1 - p) .^ k)
return StatsBase.Weights(prob_each, sum(prob_each))
diff --git a/src/Recorder.jl b/src/Recorder.jl
index 171a1f46..2348af36 100644
--- a/src/Recorder.jl
+++ b/src/Recorder.jl
@@ -4,7 +4,7 @@ using ..CoreModule: RecordType
"Assumes that `options` holds the user options::AbstractOptions"
macro recorder(ex)
- quote
+ return quote
if $(esc(:options)).use_recorder
$(esc(ex))
end
diff --git a/src/SearchUtils.jl b/src/SearchUtils.jl
index 23358d9d..f3d5fdca 100644
--- a/src/SearchUtils.jl
+++ b/src/SearchUtils.jl
@@ -262,8 +262,8 @@ macro sr_spawner(expr, kws...)
@assert all(ex -> ex.head == :(=), kws)
@assert any(ex -> ex.args[1] == :parallelism, kws)
@assert any(ex -> ex.args[1] == :worker_idx, kws)
- parallelism = kws[findfirst(ex -> ex.args[1] == :parallelism, kws)::Int].args[2]
- worker_idx = kws[findfirst(ex -> ex.args[1] == :worker_idx, kws)::Int].args[2]
+ parallelism = kws[findfirst(ex -> ex.args[1] == :parallelism, kws) :: Int].args[2]
+ worker_idx = kws[findfirst(ex -> ex.args[1] == :worker_idx, kws) :: Int].args[2]
return quote
if $(parallelism) == :serial
$(expr)
@@ -363,7 +363,7 @@ end
function _check_for_loss_threshold(halls_of_fame, f::F, options::AbstractOptions) where {F}
return all(halls_of_fame) do hof
any(hof.members[hof.exists]) do member
- f(member.loss, compute_complexity(member, options))::Bool
+ return f(member.loss, compute_complexity(member, options))::Bool
end
end
end
@@ -431,7 +431,7 @@ function update_progress_bar!(
dataset::Dataset{T,L},
options::AbstractOptions,
equation_speed::Vector{Float32},
- head_node_occupation::Float64,
+ head_node_occupation::Float64;
parallelism=:serial,
) where {T,L}
equation_strings = string_dominating_pareto_curve(
@@ -604,7 +604,7 @@ function save_to_file(
# Write file twice in case exit in middle of filewrite
for out_file in (output_file, output_file * ".bkup")
open(out_file, "w") do io
- write(io, s)
+ return write(io, s)
end
end
return nothing
diff --git a/src/SymbolicRegression.jl b/src/SymbolicRegression.jl
index 0530dba4..a4911266 100644
--- a/src/SymbolicRegression.jl
+++ b/src/SymbolicRegression.jl
@@ -800,9 +800,9 @@ function _main_search_loop!(
resource_monitor = ResourceMonitor(;
# Storing n times as many monitoring intervals as populations seems like it will
# help get accurate resource estimates:
- max_recordings=options.populations * 100 * nout,
- start_reporting_at=options.populations * 3 * nout,
- window_size=options.populations * 2 * nout,
+ max_recordings=(options.populations * 100 * nout),
+ start_reporting_at=(options.populations * 3 * nout),
+ window_size=(options.populations * 2 * nout),
)
while sum(state.cycles_remaining) > 0
kappa += 1
@@ -1093,7 +1093,7 @@ using ConstructionBase: ConstructionBase as _
include("precompile.jl")
redirect_stdout(devnull) do
redirect_stderr(devnull) do
- do_precompilation(Val(:precompile))
+ return do_precompilation(Val(:precompile))
end
end
diff --git a/src/TemplateExpression.jl b/src/TemplateExpression.jl
index dfc8b59c..fff6e79e 100644
--- a/src/TemplateExpression.jl
+++ b/src/TemplateExpression.jl
@@ -162,8 +162,8 @@ function EB.create_expression(
options::AbstractOptions,
dataset::Dataset{T,L},
::Type{<:AbstractExpressionNode},
- ::Type{E},
- ::Val{embed}=Val(false),
+ ::Type{E};
+ (::Val{embed})=Val(false),
) where {T,L,embed,E<:TemplateExpression}
function_keys = keys(options.expression_options.variable_mapping)
@@ -219,8 +219,8 @@ function DE.string_tree(
end
function DE.eval_tree_array(
tree::TemplateExpression{T},
- cX::AbstractMatrix{T},
- operators::Union{AbstractOperatorEnum,Nothing}=nothing;
+ cX::AbstractMatrix{T};
+ operators::Union{AbstractOperatorEnum,Nothing}=nothing,
kws...,
) where {T}
raw_contents = get_contents(tree)
@@ -294,7 +294,7 @@ end
"""We combine the operators of each inner expression."""
function DE.combine_operators(
- ex::TemplateExpression{T,N}, operators::Union{AbstractOperatorEnum,Nothing}=nothing
+ ex::TemplateExpression{T,N}; operators::Union{AbstractOperatorEnum,Nothing}=nothing
) where {T,N}
raw_contents = get_contents(ex)
function_keys = keys(raw_contents)
@@ -306,7 +306,7 @@ end
"""We simplify each inner expression."""
function DE.simplify_tree!(
- ex::TemplateExpression{T,N}, operators::Union{AbstractOperatorEnum,Nothing}=nothing
+ ex::TemplateExpression{T,N}; operators::Union{AbstractOperatorEnum,Nothing}=nothing
) where {T,N}
raw_contents = get_contents(ex)
function_keys = keys(raw_contents)
@@ -323,7 +323,7 @@ end
function CC.check_constraints(
ex::TemplateExpression,
options::AbstractOptions,
- maxsize::Int,
+ maxsize::Int;
cursize::Union{Int,Nothing}=nothing,
)::Bool
raw_contents = get_contents(ex)
@@ -333,7 +333,7 @@ function CC.check_constraints(
has_invalid_variables = any(keys(raw_contents)) do key
tree = raw_contents[key]
allowed_variables = variable_mapping[key]
- contains_other_features_than(tree, allowed_variables)
+ return contains_other_features_than(tree, allowed_variables)
end
if has_invalid_variables
return false
@@ -354,7 +354,7 @@ function contains_other_features_than(tree::AbstractExpression, features)
end
function contains_other_features_than(tree::AbstractExpressionNode, features)
any(tree) do node
- node.degree == 0 && !node.constant && node.feature ∉ features
+ return node.degree == 0 && !node.constant && node.feature ∉ features
end
end
diff --git a/src/Utils.jl b/src/Utils.jl
index da67bcf4..0f73a648 100644
--- a/src/Utils.jl
+++ b/src/Utils.jl
@@ -95,8 +95,9 @@ const max_ops = 8192
const vals = ntuple(Val, max_ops)
"""Return the bottom k elements of x, and their indices."""
-bottomk_fast(x::AbstractVector{T}, k) where {T} =
- _bottomk_dispatch(x, vals[k])::Tuple{Vector{T},Vector{Int}}
+function bottomk_fast(x::AbstractVector{T}, k) where {T}
+ return _bottomk_dispatch(x, vals[k])::Tuple{Vector{T},Vector{Int}}
+end
function _bottomk_dispatch(x::AbstractVector{T}, ::Val{k}) where {T,k}
if k == 1
@@ -113,7 +114,7 @@ function _bottomk!(x, minval, indmin)
if new_min
minval[end] = x[i]
indmin[end] = i
- for ki in lastindex(minval):-1:(firstindex(minval) + 1)
+ for ki in lastindex(minval):-1:(firstindex(minval)+1)
need_swap = minval[ki] < minval[ki - 1]
if need_swap
minval[ki], minval[ki - 1] = minval[ki - 1], minval[ki]
diff --git a/test/manual_distributed.jl b/test/manual_distributed.jl
index 0d6490c6..e9a9f4e9 100644
--- a/test/manual_distributed.jl
+++ b/test/manual_distributed.jl
@@ -5,12 +5,10 @@ procs = addprocs(2)
using Test, Pkg
project_path = splitdir(Pkg.project().path)[1]
@everywhere procs begin
- Base.MainInclude.eval(
- quote
- using Pkg
- Pkg.activate($$project_path)
- end,
- )
+ Base.MainInclude.eval(quote
+ using Pkg
+ Pkg.activate($$project_path)
+ end)
end
@everywhere using SymbolicRegression
@everywhere _inv(x::Float32)::Float32 = 1.0f0 / x
diff --git a/test/test_dataset.jl b/test/test_dataset.jl
index 50547197..20717f72 100644
--- a/test/test_dataset.jl
+++ b/test/test_dataset.jl
@@ -13,7 +13,7 @@ end
using SymbolicRegression
using DispatchDoctor: allow_unstable
dataset = allow_unstable() do
- Dataset(randn(ComplexF32, 3, 32), randn(ComplexF32, 32); loss_type=Float64)
+ return Dataset(randn(ComplexF32, 3, 32), randn(ComplexF32, 32); loss_type=Float64)
end
@test dataset isa Dataset{ComplexF32,Float64}
end
diff --git a/test/test_derivatives.jl b/test/test_derivatives.jl
index 9615a927..cbc411d0 100644
--- a/test/test_derivatives.jl
+++ b/test/test_derivatives.jl
@@ -135,7 +135,7 @@ function check_tree(
tree::AbstractExpressionNode, node_index::NodeIndex, constant_list::AbstractVector
)
if tree.degree == 0
- (!tree.constant) || tree.val == constant_list[node_index.val::UInt16]
+ (!tree.constant) || tree.val == constant_list[node_index.val:: UInt16]
elseif tree.degree == 1
check_tree(tree.l, node_index.l, constant_list)
else
diff --git a/test/test_expression_derivatives.jl b/test/test_expression_derivatives.jl
index c8cba75a..b8953d5e 100644
--- a/test/test_expression_derivatives.jl
+++ b/test/test_expression_derivatives.jl
@@ -17,7 +17,7 @@
x = @view X[1, :]
y = @view X[2, :]
- sum(i -> x[i] * x[i] - cos(2.5 * y[i]), eachindex(x))
+ return sum(i -> x[i] * x[i] - cos(2.5 * y[i]), eachindex(x))
end
δy_hat = ex'(X)
@@ -28,7 +28,7 @@
x = @view X[1, :]
y = @view X[2, :]
- sum(i -> (x[i] + x[i]) * sin(2.5 + y[i]), eachindex(x))
+ return sum(i -> (x[i] + x[i]) * sin(2.5 + y[i]), eachindex(x))
end
δy2_hat = ex'(X, options2)
@@ -98,7 +98,7 @@ end
X[1, i] * X[1, i] - cos(c[1] * X[2, i] + c[2]) + params[1, classes[i]] for
i in 1:32
]
- sum(abs2, pred .- y) / length(y)
+ return sum(abs2, pred .- y) / length(y)
end
options = Options(;
diff --git a/test/test_mlj.jl b/test/test_mlj.jl
index d2677348..b489fd63 100644
--- a/test/test_mlj.jl
+++ b/test/test_mlj.jl
@@ -102,7 +102,7 @@ end
rng = MersenneTwister(0)
X = (b1=randn(rng, 32), b2=randn(rng, 32))
- Y = (c1=X.b1 .* X.b2, c2=X.b1 .+ X.b2)
+ Y = (c1=(X.b1 .* X.b2), c2=(X.b1 .+ X.b2))
w = ones(32)
model = MultitargetSRRegressor(; niterations=10, stop_kws...)
mach = machine(model, X, Y, w)
@@ -189,7 +189,7 @@ end
# Ensure that the hall of fame is empty:
_, hofs = mach.fitresult.state
foreach(hofs) do hof
- hof.exists .= false
+ return hof.exists .= false
end
mach.report[:fit] = SymbolicRegression.MLJInterfaceModule.full_report(
model, mach.fitresult
diff --git a/test/test_params.jl b/test/test_params.jl
index b74b5801..f72386a0 100644
--- a/test/test_params.jl
+++ b/test/test_params.jl
@@ -77,7 +77,7 @@ test_info(f::F, ::Test.Fail) where {F} = f()
macro quiet(ex)
return quote
redirect_stderr(devnull) do
- $ex
+ return $ex
end
end |> esc
end
diff --git a/test/test_recorder.jl b/test/test_recorder.jl
index 95d7b734..02db0e56 100644
--- a/test/test_recorder.jl
+++ b/test/test_recorder.jl
@@ -25,7 +25,7 @@ hall_of_fame = equation_search(
)
data = open(options.recorder_file, "r") do io
- JSON3.read(io; allow_inf=true)
+ return JSON3.read(io; allow_inf=true)
end
@test haskey(data, :options)
diff --git a/test/test_template_expression.jl b/test/test_template_expression.jl
index 8f44d521..4cef4365 100644
--- a/test/test_template_expression.jl
+++ b/test/test_template_expression.jl
@@ -7,14 +7,14 @@
options = Options(; binary_operators=(+, *, /, -), unary_operators=(sin, cos))
operators = options.operators
variable_names = (i -> "x$i").(1:3)
- x1, x2, x3 =
- (i -> Expression(Node(Float64; feature=i); operators, variable_names)).(1:3)
+ x1, x2, x3 = (i -> Expression(Node(Float64; feature=i); operators, variable_names)).(
+ 1:3
+ )
# For combining expressions to a single expression:
- my_structure(nt::NamedTuple{<:Any,<:Tuple{Vararg{<:AbstractString}}}) =
- "sin($(nt.f)) + $(nt.g)^2"
- my_structure(nt::NamedTuple{<:Any,<:Tuple{Vararg{<:AbstractVector}}}) =
- @. sin(nt.f) + nt.g^2
+ my_structure(nt::NamedTuple{<:Any,<:Tuple{Vararg{<:AbstractString}}}) = "sin($(nt.f)) + $(nt.g)^2"
+ my_structure(nt::NamedTuple{<:Any,<:Tuple{Vararg{<:AbstractVector}}}) = @. sin(nt.f) +
+ nt.g^2
my_structure(nt::NamedTuple{<:Any,<:Tuple{Vararg{<:Expression}}}) =
sin(nt.f) + nt.g * nt.g
@@ -64,14 +64,14 @@ end
operators = OperatorEnum(; binary_operators=(+, *, /, -), unary_operators=(sin, cos))
variable_names = (i -> "x$i").(1:3)
- x1, x2, x3 =
- (i -> Expression(Node(Float64; feature=i); operators, variable_names)).(1:3)
+ x1, x2, x3 = (i -> Expression(Node(Float64; feature=i); operators, variable_names)).(
+ 1:3
+ )
# For combining expressions to a single expression:
- my_structure(nt::NamedTuple{<:Any,<:Tuple{Vararg{<:AbstractString}}}) =
- "sin($(nt.f)) + $(nt.g)^2"
- my_structure(nt::NamedTuple{<:Any,<:Tuple{Vararg{<:AbstractVector}}}) =
- @. sin(nt.f) + nt.g^2
+ my_structure(nt::NamedTuple{<:Any,<:Tuple{Vararg{<:AbstractString}}}) = "sin($(nt.f)) + $(nt.g)^2"
+ my_structure(nt::NamedTuple{<:Any,<:Tuple{Vararg{<:AbstractVector}}}) = @. sin(nt.f) +
+ nt.g^2
my_structure(nt::NamedTuple{<:Any,<:Tuple{Vararg{<:Expression}}}) =
sin(nt.f) + nt.g * nt.g
@@ -101,8 +101,11 @@ end
variable_names = (i -> "x$i").(1:3)
# Create expressions
- x1, x2, x3 =
- (i -> Expression(Node(Float64; feature=i); options.operators, variable_names)).(1:3)
+ x1, x2, x3 = (
+ i -> Expression(Node(Float64; feature=i); options.operators, variable_names)
+ ).(
+ 1:3
+ )
# Test with vector inputs:
nt_vector = NamedTuple{(:f, :g1, :g2, :g3)}((1:3, 4:6, 7:9, 10:12))
@@ -136,8 +139,9 @@ end
operators =
Options(; binary_operators=(+, *, /, -), unary_operators=(sin, cos)).operators
variable_names = (i -> "x$i").(1:3)
- x1, x2, x3 =
- (i -> Expression(Node(Float64; feature=i); operators, variable_names)).(1:3)
+ x1, x2, x3 = (i -> Expression(Node(Float64; feature=i); operators, variable_names)).(
+ 1:3
+ )
my_structure(nt) = nt.f
diff --git a/test/test_units.jl b/test/test_units.jl
index a586f5e3..5dff9a40 100644
--- a/test/test_units.jl
+++ b/test/test_units.jl
@@ -131,15 +131,15 @@ end
# Check that every cos(...) which contains x1 also has complexity
has_cos(tree) =
any(get_tree(tree)) do t
- t.degree == 1 && options.operators.unaops[t.op] == cos
+ return t.degree == 1 && options.operators.unaops[t.op] == cos
end
valid_trees = [
!has_cos(member.tree) || any(
t ->
t.degree == 1 &&
- options.operators.unaops[t.op] == cos &&
- Node(Float64; feature=1) in t &&
- compute_complexity(t, options) > 1,
+ options.operators.unaops[t.op] == cos &&
+ Node(Float64; feature=1) in t &&
+ compute_complexity(t, options) > 1,
get_tree(member.tree),
) for member in dominating
]
@@ -243,10 +243,10 @@ end
best = first(filter(m::PopMember -> m.loss < 1e-7, dominating)).tree
@test compute_complexity(best, options2) == 6
@test any(get_tree(best)) do t
- t.degree == 1 && options2.operators.unaops[t.op] == cbrt
+ return t.degree == 1 && options2.operators.unaops[t.op] == cbrt
end
@test any(get_tree(best)) do t
- t.degree == 1 && options2.operators.unaops[t.op] == safe_sqrt
+ return t.degree == 1 && options2.operators.unaops[t.op] == safe_sqrt
end
@testset "With MLJ" begin
@@ -268,10 +268,10 @@ end
best_idx = findfirst(report.losses .< 1e-7)::Int
@test report.complexities[best_idx] <= 6
@test any(get_tree(report.equations[best_idx])) do t
- t.degree == 1 && t.op == 2 # cbrt
+ return t.degree == 1 && t.op == 2 # cbrt
end
@test any(get_tree(report.equations[best_idx])) do t
- t.degree == 1 && t.op == 1 # safe_sqrt
+ return t.degree == 1 && t.op == 1 # safe_sqrt
end
# Prediction should have same units:
diff --git a/test/test_utils.jl b/test/test_utils.jl
index 67ceb0dc..2433476a 100644
--- a/test/test_utils.jl
+++ b/test/test_utils.jl
@@ -9,8 +9,8 @@ function simple_bottomk(x, k)
end
array_options = [
- (n=n, seed=seed, T=T) for n in (1, 5, 20, 50, 100, 1000), seed in 1:10,
- T in (Float32, Float64, Int)
+ (n=n, seed=seed, T=T) for
+ n in (1, 5, 20, 50, 100, 1000), seed in 1:10, T in (Float32, Float64, Int)
]
@testset "argmin_fast" begin
Some changes seem like new quirks, though they don’t actually affect semantics, like
- (!tree.constant) || tree.val == constant_list[node_index.val::UInt16]
+ (!tree.constant) || tree.val == constant_list[node_index.val:: UInt16]
Besides the ones mentioned in the other issues and the one in the comment above, are there any other ones in the diff that are also quirks?
I'm seeing several new kinds:
open(path, "w") do io
- TOML.print(io, data)
+ return TOML.print(io, data)
end
node_sample_size =
- rand(rng, 1:(nv(base_proposal.grid_graph) - length(base_proposal.rez_assets))) - 1
+ rand(rng, 1:(nv(base_proposal.grid_graph)-length(base_proposal.rez_assets))) - 1
- known_nodes = known_nodes[.!to_purge, :]
+ known_nodes = known_nodes[.! to_purge, :]
[:FrEnode, :ToEnode] =>
(FrEnode, ToEnode) ->
(FrEnode == from.id && ToEnode == to.id) ||
- (FrEnode == to.id && ToEnode == from.id),
+ (FrEnode == to.id && ToEnode == from.id),
branches,
- branches.Foooooooo[to_impute] .=
- get_foooooooo.(branches.Voltage[to_impute], branches.Distance[to_impute])
+ branches.Foooooooo[to_impute] .= get_foooooooo.(
+ branches.Voltage[to_impute], branches.Distance[to_impute]
+ )
this one is actually more correct now, since the extra indentation should only be applied to short-circuit boolean expressions. The example above is a regular boolean expression where a true or false is returned, whereas in a short-circuit expression something else (although it can also be a boolean) is returned or done once all the preceding clauses succeed, i.e. a && b && return c vs a && b && c
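A minimal sketch of the distinction (the node fields mirror the contains_other_features_than hunk above; describe is a made-up name):

function describe(node, features)
    # short-circuit chain used as control flow: the trailing clause is an
    # action that only runs when every preceding clause is true
    node.degree == 0 && node.constant && return "constant leaf"

    # regular boolean chain: the expression itself is the returned value
    return node.degree == 0 && !node.constant && node.feature ∉ features
end

describe((degree=0, constant=false, feature=2), (1, 3))  # true: feature 2 is not allowed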
I was reviewing the changes in v2 and found a quirk (only a single one actually, so very good job there! :wink: ), so I thought I would report it here instead of opening another issue about the same thing.
In short I think there's unstable formatting at https://github.com/COBREXA/COBREXA.jl/blob/ed2e2601b5f1bff04f9927e660c587b1ba52286b/src/frontend/balance.jl#L68-L74 (the source is currently formatted by JuliaFormatter 1):
By default, JuliaFormatter2 inserts a (slightly under-explained) empty line there, like so:
met => C.Constraint(
    value = let i = stoiT.colptr[row_idx], e = stoiT.colptr[row_idx+1] - 1

        C.LinearValue(idxs = stoiT.rowval[i:e], weights = stoiT.nzval[i:e])
    end,
    bound = C.EqualTo(b),
) for (met, row_idx, b) in zip(mets, 1:stoiT.n, bal)
OTOH, when I move any of the let-definitions to the other line, JuliaFormatter2 seems to converge to a different formatting:
met => C.Constraint(
    value = let
        i = stoiT.colptr[row_idx], e = stoiT.colptr[row_idx+1] - 1
        C.LinearValue(idxs = stoiT.rowval[i:e], weights = stoiT.nzval[i:e])
    end,
    bound = C.EqualTo(b),
) for (met, row_idx, b) in zip(mets, 1:stoiT.n, bal)
I.e., both of these seem to be accepted as "valid" and stable by JF, while the latter is clearly better.
(Note: this is with the version from the current dl/v2-fixes branch.)
PS @kenahoo I'm seeing similar things as you do, but since these seem consistent and converge to a single style, I thought these are actually on purpose and OK-ish. In short it seems to me that 1] stuff in square brackets is no longer compacted by removing spaces around operators by default, and 2] stuff in range expressions with : is now compacted even outside of square brackets.
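Reduced to a minimal pair each (a sketch; n, x, and to_purge stand in for the variables in the hunks quoted above):

n, x = 10, randn(8)
to_purge = x .> 0

# 2] ranges with : are now compacted, even outside square brackets:
collect(0:(n - 1))   # JuliaFormatter 1
collect(0:(n-1))     # JuliaFormatter 2

# 1] spacing inside square brackets is no longer normalized away:
x[.!to_purge]        # JuliaFormatter 1
x[.! to_purge]       # JuliaFormatter 2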
@exaexa these are actually different in terms of how they scope the variables; see https://docs.julialang.org/en/v1/manual/variables-and-scoping/#Let-Blocks.
Putting a variable assignment on the same line as a let is different than having the assignment happen inside the block.
Best summarised like this:
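(reconstructed sketch, assuming standard Julia scoping rules; demo is a made-up name)

function demo()
    y = 0
    let y = 1   # y = 1 on the let line introduces a NEW binding
        y = 10  # updates only the let-local y
    end
    # here y is still 0

    let
        y = 2   # a plain assignment in the body updates the OUTER y
    end
    return y    # returns 2
end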
@MilesCranmer
@exaexa these are actually different in terms of how they scope the variables; see https://docs.julialang.org/en/v1/manual/variables-and-scoping/#Let-Blocks.
Putting a variable assignment on the same line as a let is different than having the assignment happen inside the block.
EDIT: deleted the previous speculation, apologies for the noise, thanks for the explanation.
Still, is there an explanation for the blank line in this format?
:flux_stoichiometry^C.ConstraintTree(
met => C.Constraint(
- value = let i = stoiT.colptr[row_idx],
- e = stoiT.colptr[row_idx+1] - 1
+ value = let i = stoiT.colptr[row_idx], e = stoiT.colptr[row_idx+1] - 1
+
C.LinearValue(idxs = stoiT.rowval[i:e], weights = stoiT.nzval[i:e])
end,
bound = C.EqualTo(b),
) for (met, row_idx, b) in zip(mets, 1:stoiT.n, bal)
)
I’m assuming it’s just to help disambiguate between
let x = 1,
    y = 2
end
where the comma actually puts the y onto the same line as the let, and
let x = 1
    y = 2
end
where the y is on a different line.
So I could see how the
let x = 1

    y = 2
end
is clearer about this distinction.
@MilesCranmer good point, it's probably some logic failure around the decision on when to add the extra empty line there. I guess the logic there is subtle.
While testing the gazillion of possible combinations, I noticed that v2 doesn't add spaces around the equals signs in let in this case (and generally around equals signs), which might also be worth checking:
let x=very(long, thing, to, fit, on, one, line), # the line gets split here
    y = another(long, thing, to, fit, easily, to, a, single, lone)
    # this empty line is inserted just right
    z = x(y(z, also, quite, long, not, sure, how, much, or, what))
end
Anyway, I found that the empty line in the following shortened snippet is actually retained only if remove_extra_newlines is off. So I assume some weird, hardly explicable interaction.
let x=very(long, thing, to, fit, on, one, line), y = another(long, thing, to, fit)

    z = x(y(z, also, quite, long, not, sure, how, much, or, what))
end
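For reference, a minimal way I'd reproduce this (sketch; assumes the dl/v2-fixes branch and the public format_text entry point):

using JuliaFormatter

src = """
let x=very(long, thing, to, fit, on, one, line), y = another(long, thing, to, fit)

    z = x(y(z, also, quite, long, not, sure, how, much, or, what))
end
"""

print(format_text(src))                              # blank line after the let head is retained
print(format_text(src; remove_extra_newlines=true))  # blank line is removed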
Maybe, is there some option to just ignore all source indentation and formatting and reconstruct the files from a completely space-less AST?
Hm I'm not sure about that one
https://github.com/domluna/JuliaFormatter.jl/blob/master/src/styles/default/pretty.jl#L1364-L1366
this is the section that adds the newline. It's added there for let, to help differentiate the "let" arguments from the "let" body. This also applies to for loops. I personally don't have a preference either way.
function n_for!(
    ds::AbstractStyle,
    fst::FST,
    s::State,
    lineage::Vector{Tuple{FNode,Union{Nothing,Metadata}}},
)
    style = getstyle(ds)
    nested = false
    nodes = fst.nodes::Vector{FST}
    for (i, n) in enumerate(nodes)
        if n.typ === NEWLINE && nodes[i+1].typ === Block
            s.line_offset = nodes[i+1].indent
        elseif n.typ === NOTCODE && nodes[i+1].typ === Block
            s.line_offset = nodes[i+1].indent
        elseif n.typ === NEWLINE
            s.line_offset = fst.indent
        elseif n.typ === PLACEHOLDER && nested
            fst[i] = Newline(; length = n.len, nest_behavior = AllowNestButDontRemove)
            s.line_offset = fst.indent
        else
            n.extra_margin = fst.extra_margin
            nested |= nest!(style, n, s, lineage)
        end
    end
    return nested
end

function n_let!(
    ds::AbstractStyle,
    fst::FST,
    s::State,
    lineage::Vector{Tuple{FNode,Union{Nothing,Metadata}}},
)
    n_for!(ds, fst, s, lineage)
end
The extra line is only added if the arguments or body is nested, which in the example above doesn't seem to be the case, so that might be a bug.
https://github.com/domluna/JuliaFormatter.jl/blob/master/src/print.jl#L189-L194
This is where remove_extra_newlines is used. It's possible there is some odd interaction here, since it doesn't factor in Newline() nodes: if a Newline is immediately followed by a Notcode, then perhaps there will still be extra newlines.
@domluna thanks! I'm actually not super interested in whether the newline is or isn't there; the main concern for me is to make sure that the input spaces (outside strings & comments of course, and [edit:] the explicit empty-line spaces) have no effect on the output. This looked like it was somewhat input-spacing-dependent; with the newline-removal option it looks OK.
Btw it might be super useful for people who use this for technical purposes (commit cleaning in my case) to have the "ignore all input spaces" option explicitly available.
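Something like this check is what I'd want to hold (sketch using the existing API; balance.jl is the file linked above):

using JuliaFormatter

src       = read("balance.jl", String)
formatted = format_text(src)

# the output should be a fixed point of the formatter...
@assert format_text(formatted) == formatted
# ...and should not depend on blank lines in the input:
@assert format_text(replace(src, r"\n{2,}" => "\n")) == formatted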