Remove Random requirement in tests for determinism
AP6YC committed Aug 12, 2021
1 parent 9a57d41 commit a99edf5
Showing 6 changed files with 32 additions and 67 deletions.
3 changes: 1 addition & 2 deletions Project.toml
@@ -21,9 +21,8 @@ julia = "1"
 
 [extras]
 DelimitedFiles = "8bb1440f-4735-579b-a4ab-409b98df4dab"
-Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
 SafeTestsets = "1bc83da4-3b8d-516f-aca4-4fe02f6d838f"
 Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
 
 [targets]
-test = ["Test", "SafeTestsets", "DelimitedFiles", "Random"]
+test = ["Test", "SafeTestsets", "DelimitedFiles"]
3 changes: 0 additions & 3 deletions examples/art/dvfa_supervised.jl
@@ -8,9 +8,6 @@ Description:
 using Logging
 using DelimitedFiles
 using AdaptiveResonance
-using Random
-
-Random.seed!(0)
 
 # Set the log level
 LogLevel(Logging.Info)
6 changes: 1 addition & 5 deletions examples/art/gnfa.jl
@@ -3,15 +3,11 @@ using Logging
 
 # Set the log level
 LogLevel(Logging.Info)
+@info "GNFA Testing"
 
 # Auxiliary generic functions for loading data, etc.
 include("../../test/test_utils.jl")
 
-@info "GNFA Testing"
-
-# Standardize the seed
-Random.seed!(0)
-
 # GNFA train and test
 opts = opts_GNFA(rho=0.5)
 my_gnfa = GNFA(opts)
40 changes: 15 additions & 25 deletions test/test_ddvfa.jl
@@ -20,14 +20,10 @@ function tt_ddvfa(opts::opts_DDVFA, train_x::Array)
 end # tt_ddvfa(opts::opts_DDVFA, train_x::Array)
 
 @testset "DDVFA Sequential" begin
-    # Set the logging level to Info and standardize the random seed
-    LogLevel(Logging.Info)
-    Random.seed!(0)
-
     @info "------- DDVFA Sequential -------"
 
     # Load the data and test across all supervised modules
-    data = load_iris("../data/Iris.csv")
+    # data = load_iris("../data/Iris.csv")
 
     # Initialize the ART module
     art = DDVFA()
@@ -71,14 +67,10 @@ end # tt_ddvfa(opts::opts_DDVFA, train_x::Array)
 end
 
 @testset "DDVFA Supervised" begin
-    # Set the logging level to Info and standardize the random seed
-    LogLevel(Logging.Info)
-    Random.seed!(0)
-
     @info "------- DDVFA Supervised -------"
 
     # Load the data and test across all supervised modules
-    data = load_iris("../data/Iris.csv")
+    # data = load_iris("../data/Iris.csv")
 
     # Train and classify
     art = DDVFA()
@@ -104,9 +96,6 @@ end
 end
 
 @testset "DDVFA" begin
-    # Set the log level
-    LogLevel(Logging.Info)
-
     # Parse the data
     data_file = "../data/art_data_rng.csv"
     train_x = readdlm(data_file, ',')
@@ -132,12 +121,11 @@ end # @testset "DDVFA"
 
 @testset "GNFA" begin
     @info "------- GNFA Testing -------"
-    Random.seed!(0)
 
     # GNFA train and test
     my_gnfa = GNFA()
     # data = load_am_data(200, 50)
-    data = load_iris("../data/Iris.csv")
+    # data = load_iris("../data/Iris.csv")
     local_complement_code = AdaptiveResonance.complement_code(data.train_x)
     train!(my_gnfa, local_complement_code)
@@ -163,24 +151,24 @@ end # @testset "DDVFA"
     # Declare the true activation and match magnitudes
     truth = Dict(
         "single" => Dict(
-            "T" => 0.9988445088278305,
-            "M" => 2.591300556893253
+            "T" => 0.9988714513100155,
+            "M" => 2.6532834139109758
         ),
         "average" => Dict(
-            "T" => 0.41577750468594143,
-            "M" => 1.322517210029363
+            "T" => 0.33761483787933894,
+            "M" => 1.1148764060015297
         ),
         "complete" => Dict(
-            "T" => 0.04556971777638373,
-            "M" => 0.13166315262229716
+            "T" => 0.018234409874338647,
+            "M" => 0.07293763949735459
         ),
         "median" => Dict(
-            "T" => 0.3312241307874298,
-            "M" => 1.3248965231497192
+            "T" => 0.2089217851518073,
+            "M" => 0.835687140607229
         ),
         "weighted" => Dict(
-            "T" => 0.533208585217186,
-            "M" => 1.3855766656866793
+            "T" => 0.5374562506748786,
+            "M" => 1.4396083090159748
         ),
         "centroid" => Dict(
             "T" => 0.0,
     end
 
     # Check the error handling of the similarity function
+    # Access the wrong similarity metric keyword ("asdf")
     @test_throws ErrorException AdaptiveResonance.similarity("asdf", my_gnfa, "T", local_sample, my_gnfa.opts.gamma_ref)
+    # Access the wrong output function ("A")
     @test_throws ErrorException AdaptiveResonance.similarity("centroid", my_gnfa, "A", local_sample, my_gnfa.opts.gamma_ref)
 
 end # @testset "GNFA"
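The loop that consumes the truth dictionary above is collapsed out of this view; judging from the visible similarity calls, it presumably looks something like the sketch below (the variable names and loop shape are assumptions, not the repository's exact code):

# Illustrative sketch only: check each linkage method's activation ("T")
# and match ("M") magnitudes against the hardcoded truth values.
for (method, values) in truth
    T = AdaptiveResonance.similarity(method, my_gnfa, "T", local_sample, my_gnfa.opts.gamma_ref)
    M = AdaptiveResonance.similarity(method, my_gnfa, "M", local_sample, my_gnfa.opts.gamma_ref)
    @test isapprox(T, values["T"])
    @test isapprox(M, values["M"])
end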
37 changes: 12 additions & 25 deletions test/test_sets.jl
@@ -2,22 +2,19 @@ using AdaptiveResonance
 using Test
 using Logging
 using DelimitedFiles
-using Random
+# using Random
 
 # Set the log level
 LogLevel(Logging.Info)
 
 # Auxiliary generic functions for loading data, etc.
 include("test_utils.jl")
 
-@testset "common.jl" begin
-    # Set the logging level to Info and standardize the random seed
-    LogLevel(Logging.Info)
-    Random.seed!(0)
-
-    # Load the data and test across all supervised modules
-    data = load_iris("../data/Iris.csv")
+# Load the data and test across all supervised modules
+data = load_iris("../data/Iris.csv")
 
+@testset "common.jl" begin
+    @info "------- Common Code Tests -------"
     # Example arrays
     three_by_two = [1 2; 3 4; 5 6]
 
@@ -35,11 +32,10 @@ include("test_utils.jl")
     @test_logs (:warn,) AdaptiveResonance.data_setup!(dc3, three_by_two)
     bad_config = DataConfig(1, 0, 3)
     @test_throws ErrorException linear_normalization(three_by_two, config=bad_config)
-
-end
+end # @testset "common.jl"
 
 @testset "constants.jl" begin
-    @info "Constants testing"
+    @info "------- Constants Tests -------"
     ddvfa_methods = [
         "single",
         "average",
@@ -49,16 +45,9 @@
         "centroid"
     ]
     @test AdaptiveResonance.DDVFA_METHODS == ddvfa_methods
-end
+end # @testset "constants.jl"

@testset "DVFA.jl" begin
# Set the logging level to Info and standardize the random seed
LogLevel(Logging.Info)
Random.seed!(0)

# Load the data and test across all supervised modules
data = load_iris("../data/Iris.csv")

@info "------- DVFA Unsupervised -------"

# Train and classify
@@ -101,15 +90,13 @@ end # @testset "DDVFA.jl"
 end # @testset "AdaptiveResonance.jl"
 
 @testset "ARTMAP.jl" begin
-    # Set the logging level to Info and standardize the random seed
-    LogLevel(Logging.Info)
-    Random.seed!(0)
+    # Declare the baseline performance for all modules
+    perf_baseline = 0.7
 
-    # Load the data and test across all supervised modules
-    data = load_iris("../data/Iris.csv")
+    # Iterate over each artmap module
     for art in [SFAM, DAM]
         perf = tt_supervised(art(), data)
-        @test perf > 0.8
+        @test perf > perf_baseline
     end
 end # @testset "ARTMAP.jl"

10 changes: 3 additions & 7 deletions test/test_utils.jl
@@ -1,4 +1,4 @@
-using Random
+# using Random
 using DelimitedFiles
 
 """
@@ -86,12 +86,8 @@ function load_iris(data_path::String ; split_ratio::Real = 0.8)
     # Julia is column-major, so use columns for features
     raw_x = permutedims(raw_x)
 
-    # Shuffle the data and targets
-    ind_shuffle = Random.randperm(n_samples)
-    x = raw_x[:, ind_shuffle]
-    y = raw_y[ind_shuffle]
-
-    data = DataSplit(x, y, split_ratio)
+    # Create the datasplit object
+    data = DataSplit(raw_x, raw_y, split_ratio)
 
     return data
 end # load_iris(data_path::String ; split_ratio::Real = 0.8)
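One detail worth noting from the hunk above: readdlm returns one sample per row, and the "Julia is column-major" comment explains the permutedims call that puts one sample per column instead. A tiny self-contained illustration (the matrix values here are made up):

raw = [1.0 2.0 3.0;
       4.0 5.0 6.0]      # two samples as rows, three features each
X = permutedims(raw)     # now 3x2: one sample per column, per the convention above
@assert size(X) == (3, 2)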

2 comments on commit a99edf5

@AP6YC (Owner, Author) commented on a99edf5, Aug 12, 2021:
@JuliaRegistrator register

Release notes:

Random.jl dependencies have been removed from the tests and examples for determinism, since the same random seed may produce different results across platforms and Julia versions.
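If seeded randomness is ever wanted back without reintroducing that concern, one option (an assumption of this note, not something the commit does) is StableRNGs.jl, whose streams are designed to be reproducible across Julia versions and platforms:

using Random, StableRNGs
# Hypothetical alternative to Random.seed!(0): an RNG whose output is
# stable across Julia versions, so tests stay deterministic everywhere.
rng = StableRNG(0)
ind_shuffle = randperm(rng, 150)   # e.g., a fixed permutation of the 150 Iris samples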

@JuliaRegistrator commented:
Registration pull request created: JuliaRegistries/General/42738

After the above pull request is merged, it is recommended that a tag is created on this repository for the registered package version.

This will be done automatically if the Julia TagBot GitHub Action is installed, or it can be done manually through the GitHub interface, or via:

git tag -a v0.3.5 -m "<description of version>" a99edf5e9df4f959617a62bb358b7f802d76a6fa
git push origin v0.3.5

Also, note the warning: Version 0.3.5 skips over 0.3.4. This can be safely ignored; however, if you want to fix it, call register() again after making the fix, and the pull request will be updated.
