diff --git a/.vscode/settings.json b/.vscode/settings.json
new file mode 100644
index 00000000..cb088c8a
--- /dev/null
+++ b/.vscode/settings.json
@@ -0,0 +1,3 @@
+{
+ "dotnet.defaultSolution": "Tunny.sln"
+}
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 2e9efbb9..8d3eac33 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,6 +6,20 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/) and this p
Please see [here](https://github.com/hrntsm/Tunny/releases) for the data released for each version.
+## [v0.8.2] -2023-09-04
+
+### Changed
+
+- Stopped putting built files together in gha.
+  - Because the bundled files did not work in some environments
+- If the objective function returns null 10 times in a row, optimization is stopped.
+- Update python lib
+ - bump up optuna v3.3 & optuna-dashboard v0.12.0 & some library
+
+### Fixed
+
+- NSGA-III supports constraints, but a message was shown saying that constraints are not taken into account
+
## [v0.8.1] -2023-07-30
### Added
diff --git a/PYTHON_PACKAGE_LICENSES b/PYTHON_PACKAGE_LICENSES
index dd7d9fed..d6870bf6 100644
--- a/PYTHON_PACKAGE_LICENSES
+++ b/PYTHON_PACKAGE_LICENSES
@@ -10,39 +10,38 @@ summary
| Name | Version | License |
|-------------------|---------|---------------------------------------------------|
| Mako | 1.2.4 | MIT License |
-| MarkupSafe | 2.1.2 | BSD License |
+| MarkupSafe | 2.1.3 | BSD License |
| PyYAML | 6.0 | MIT License |
-| SQLAlchemy | 1.4.46 | MIT License |
-| alembic | 1.9.2 | MIT License |
+| SQLAlchemy | 2.0.20 | MIT License |
+| alembic | 1.12.0 | MIT License |
| botorch | 0.7.3 | MIT License |
-| bottle | 0.12.23 | MIT License |
-| cmaes | 0.9.1 | MIT License |
+| bottle | 0.12.25 | MIT License |
+| cmaes | 0.10.0 | MIT License |
| colorama | 0.4.6 | BSD License |
| colorlog | 6.7.0 | MIT License |
| gpytorch | 1.9.0 | MIT |
-| greenlet | 2.0.1 | MIT License |
-| joblib | 1.2.0 | BSD License |
+| greenlet | 2.0.2 | MIT License |
+| joblib | 1.3.2 | BSD License |
| linear-operator | 0.2.0 | MIT |
| multipledispatch | 0.6.0 | BSD |
-| numpy | 1.24.1 | BSD License |
+| numpy | 1.25.2 | BSD License |
| opt-einsum | 3.3.0 | MIT |
-| optuna | 3.2.0 | MIT License |
-| optuna-dashboard | 0.10.2 | MIT License |
-| packaging | 23.0 | Apache Software License; BSD License |
+| optuna | 3.3.0 | MIT License |
+| optuna-dashboard | 0.12.0 | MIT License |
+| packaging | 23.1 | Apache Software License; BSD License |
| pip | 23.0 | MIT License |
| pip-licenses | 4.0.3 | MIT License |
-| plotly | 5.9.0 | MIT |
-| prettytable | 3.6.0 | BSD License |
+| plotly | 5.16.1 | MIT |
| pyro-api | 0.1.2 | Apache Software License |
| pyro-ppl | 1.8.4 | Apache Software License |
-| scikit-learn | 1.2.0 | BSD License |
-| scipy | 1.10.0 | BSD License |
+| scikit-learn | 1.3.0 | BSD License |
+| scipy | 1.11.2 | BSD License |
| setuptools | 65.5.0 | MIT License |
| six | 1.16.0 | MIT License |
-| tenacity | 8.1.0 | Apache Software License |
-| threadpoolctl | 3.1.0 | BSD License |
+| tenacity | 8.2.3 | Apache Software License |
+| threadpoolctl | 3.2.0 | BSD License |
| torch | 1.13.1 | BSD License |
-| tqdm | 4.64.1 | MIT License; Mozilla Public License 2.0 (MPL 2.0) |
+| tqdm | 4.66.1 | MIT License; Mozilla Public License 2.0 (MPL 2.0) |
| typing_extensions | 4.4.0 | Python Software Foundation License |
| wcwidth | 0.2.6 | MIT License |
diff --git a/Tunny/FodyWeavers.xml b/Tunny/FodyWeavers.xml
deleted file mode 100644
index 22df45d6..00000000
--- a/Tunny/FodyWeavers.xml
+++ /dev/null
@@ -1,7 +0,0 @@
-
-
-
- System.Data.SQLite
-
-
-
\ No newline at end of file
diff --git a/Tunny/Lib/requirements.txt b/Tunny/Lib/requirements.txt
index c4e2bb26..ab578c08 100644
--- a/Tunny/Lib/requirements.txt
+++ b/Tunny/Lib/requirements.txt
@@ -1,31 +1,31 @@
-alembic==1.9.2
+alembic==1.12.0
botorch==0.7.3
-bottle==0.12.23
-cmaes==0.9.1
+bottle==0.12.25
+cmaes==0.10.0
colorama==0.4.6
colorlog==6.7.0
gpytorch==1.9.0
-greenlet==2.0.1
-joblib==1.2.0
+greenlet==2.0.2
+joblib==1.3.2
linear-operator==0.2.0
Mako==1.2.4
-MarkupSafe==2.1.2
+MarkupSafe==2.1.3
multipledispatch==0.6.0
-numpy==1.24.1
+numpy==1.25.2
opt-einsum==3.3.0
-optuna==3.2.0
-optuna-dashboard==0.10.2
-packaging==23.0
-plotly==5.9.0
+optuna==3.3.0
+optuna-dashboard==0.12.0
+packaging==23.1
+plotly==5.16.1
pyro-api==0.1.2
pyro-ppl==1.8.4
-PyYAML==6.0
-scikit-learn==1.2.0
-scipy==1.10.0
+PyYAML==6.0.1
+scikit-learn==1.3.0
+scipy==1.11.2
six==1.16.0
-SQLAlchemy==1.4.46
-tenacity==8.1.0
-threadpoolctl==3.1.0
+SQLAlchemy==2.0.20
+tenacity==8.2.3
+threadpoolctl==3.2.0
torch==1.13.1
-tqdm==4.64.1
-typing_extensions==4.4.0
+tqdm==4.66.1
+typing_extensions==4.7.1
diff --git a/Tunny/Solver/Algorithm.cs b/Tunny/Solver/Algorithm.cs
index b1612a99..2ed62a73 100644
--- a/Tunny/Solver/Algorithm.cs
+++ b/Tunny/Solver/Algorithm.cs
@@ -5,6 +5,7 @@
using System.Linq;
using System.Reflection;
using System.Text;
+using System.Windows.Forms;
using Python.Runtime;
@@ -202,7 +203,7 @@ private void RunOptimize(RunOptimizeSettings optSet, out double[] xTest, out Eva
while (true)
{
- if (CheckOptimizeComplete(optSet.NTrials, optSet.Timeout, trialNum, startTime))
+ if (result == null || CheckOptimizeComplete(optSet.NTrials, optSet.Timeout, trialNum, startTime))
{
break;
}
@@ -223,7 +224,7 @@ private void RunHumanInTheLoopOptimize(RunOptimizeSettings optSet, int nBatch, o
while (true)
{
- if (CheckOptimizeComplete(optSet.NTrials, optSet.Timeout, trialNum, startTime))
+ if (result == null || CheckOptimizeComplete(optSet.NTrials, optSet.Timeout, trialNum, startTime))
{
break;
}
@@ -244,9 +245,8 @@ private EvaluatedGHResult RunSingleOptimizeStep(RunOptimizeSettings optSet, doub
dynamic trial = optSet.Study.ask();
var result = new EvaluatedGHResult();
- //TODO: Is this the correct way to handle the case of null?
int nullCount = 0;
- while (nullCount < 10)
+ while (true)
{
for (int j = 0; j < Variables.Count; j++)
{
@@ -259,7 +259,17 @@ private EvaluatedGHResult RunSingleOptimizeStep(RunOptimizeSettings optSet, doub
result = EvalFunc(pState, progress);
optSet.HumanInTheLoop?.SaveNote(optSet.Study, trial, result.ObjectiveImages);
- if (result.ObjectiveValues.Contains(double.NaN))
+ if (nullCount >= 10)
+ {
+ TunnyMessageBox.Show(
+ "The objective function returned NaN 10 times in a row. Tunny terminates the optimization. Please check the objective function.",
+ "Tunny",
+ MessageBoxButtons.OK,
+ MessageBoxIcon.Error
+ );
+ return null;
+ }
+ else if (result.ObjectiveValues.Contains(double.NaN))
{
trial = optSet.Study.ask();
nullCount++;
@@ -483,7 +493,7 @@ private dynamic SetSamplerSettings(int samplerType, dynamic optuna, bool hasCons
default:
throw new ArgumentException("Unknown sampler type");
}
- if (samplerType > 3 && hasConstraints)
+ if (samplerType > 4 && hasConstraints)
{
TunnyMessageBox.Show("Only TPE, GP and NSGA support constraints. Optimization is run without considering constraints.", "Tunny");
}
diff --git a/Tunny/Tunny.csproj b/Tunny/Tunny.csproj
index f2a050b4..c0efd8e7 100644
--- a/Tunny/Tunny.csproj
+++ b/Tunny/Tunny.csproj
@@ -2,7 +2,7 @@
net48
- 0.8.1
+ 0.8.2
Tunny
Tunny is an optimization component wrapped in optuna.
.gha
@@ -30,10 +30,6 @@
-
- runtime; build; native; contentfiles; analyzers; buildtransitive
- all
-
diff --git a/Tunny/TunnyInfo.cs b/Tunny/TunnyInfo.cs
index 536b3ab8..5f0b85ee 100644
--- a/Tunny/TunnyInfo.cs
+++ b/Tunny/TunnyInfo.cs
@@ -10,7 +10,7 @@ namespace Tunny
public class Tunny : GH_AssemblyInfo
{
public override string Name => "Tunny";
- public override string Version => "0.8.1";
+ public override string Version => "0.8.2";
public override Bitmap Icon => Resource.TunnyIcon;
public override string Description => "Tunny is an optimization component wrapped in optuna.";
public override Guid Id => new Guid("01E58960-AFAA-48FF-BC90-174FDC4A9D64");
diff --git a/Yak/manifest.yml b/Yak/manifest.yml
index 14797215..2dd78a53 100644
--- a/Yak/manifest.yml
+++ b/Yak/manifest.yml
@@ -1,6 +1,6 @@
---
name: Tunny
-version: 0.8.1
+version: 0.8.2
authors:
- hrntsm
description: 🐟Tunny🐟 is Grasshopper's optimization component using Optuna, an open source hyperparameter auto-optimization framework. Tunny includes some features such as multi-objective optimization with constraint, Quasi-MonteCarlo sampling and dashboards for results analysis etc.