diff --git a/Project.toml b/Project.toml index e5e63f0a..74d29605 100644 --- a/Project.toml +++ b/Project.toml @@ -2,7 +2,7 @@ name = "AdaptiveResonance" uuid = "3d72adc0-63d3-4141-bf9b-84450dd0395b" authors = ["Sasha Petrenko"] description = "A Julia package for Adaptive Resonance Theory (ART) algorithms." -version = "0.3.5" +version = "0.3.6" [deps] Distributed = "8ba89e20-285c-5b6f-9357-94700520ee1b" diff --git a/README.md b/README.md index 9f9924ea..4e61372a 100644 --- a/README.md +++ b/README.md @@ -57,6 +57,7 @@ Please read the [documentation](https://ap6yc.github.io/AdaptiveResonance.jl/dev - [Contents](#contents) - [Overview](#overview) - [Installation](#installation) + - [Quickstart](#quickstart) - [Implemented Modules](#implemented-modules) - [Structure](#structure) - [Contributing](#contributing) @@ -73,7 +74,8 @@ Adaptive Resonance Theory (ART) is a neurocognitive theory of how recurrent cell As a theory, it provides coherent and consistent explanations of how real neural networks learn patterns through competition, and it predicts the phenomena of attention and expectation as central to learning. In engineering, the theory has been applied to a myriad of algorithmic models for unsupervised machine learning, though it has been extended to supervised and reinforcement learning frameworks. This package provides implementations of many of these algorithms in Julia for both scientific research and engineering applications. -A quickstart is provided in [Installation](#installation), while detailed usage and examples are provided in the [documentation](https://ap6yc.github.io/AdaptiveResonance.jl/dev/). +Basic installation is outlined in [Installation](#installation), while a quickstart is provided in [Quickstart](#quickstart). +Detailed usage and examples are provided in the [documentation](https://ap6yc.github.io/AdaptiveResonance.jl/dev/). 
## Installation @@ -97,6 +99,34 @@ You may also add the package directly from GitHub to get the latest changes betw ``` ] add https://github.com/AP6YC/AdaptiveResonance.jl ``` +## Quickstart + +Load the module with + +```julia +using AdaptiveResonance +``` + +The stateful information of ART modules is stored in structs with default constructors such as + +```julia +art = DDVFA() +``` + +You can pass module-specific options during construction with keyword arguments such as + +```julia +art = DDVFA(rho_ub=0.75, rho_lb=0.4) +``` + +For more advanced users, options for the modules are contained in `Parameters.jl` structs. +These options can be constructed with keyword arguments and then passed to the model during instantiation: + +```julia +opts = opts_DDVFA(rho_ub=0.75, rho_lb=0.4) +art = DDVFA(opts) +``` + ## Implemented Modules This project has implementations of the following ART (unsupervised) and ARTMAP (supervised) modules: diff --git a/test/test_sets.jl b/test/test_sets.jl index 18f003e0..15061c82 100644 --- a/test/test_sets.jl +++ b/test/test_sets.jl @@ -94,6 +94,23 @@ end # @testset "AdaptiveResonance.jl" @info "--------- END TRAIN TEST ---------" end +@testset "kwargs" begin + @info "--------- KWARGS TEST ---------" + + arts = [ + DVFA, + DDVFA, + SFAM, + DAM + ] + + for art in arts + art_module = art(alpha=1e-3, display=false) + end + + @info "--------- END KWARGS TEST ---------" +end + @testset "DVFA.jl" begin @info "------- DVFA Unsupervised -------" diff --git a/test/test_utils.jl b/test/test_utils.jl index b533ce37..ad03958f 100644 --- a/test/test_utils.jl +++ b/test/test_utils.jl @@ -81,7 +81,10 @@ function train_test_art(art::ARTModule, data::DataSplit; supervised::Bool=false, error("Incompatible ART module passed for testing") end - @info "$(typeof(art)): performance is $perf" + # If the performance is not a NaN (potentially unsupervised), then display perf + if !isnan(perf) + @info "$(typeof(art)): performance is $perf" + end return perf end