diff --git a/.circleci/config.yml b/.circleci/config.yml index b9082a0a0c..a554cfb8ef 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -33,8 +33,6 @@ jobs: keys: # Parameterize the cache so that different python versions can get different versions of the packages - v1-dependencies-py<< parameters.pyversion >>-{{ checksum "python_deps.txt" }} - # fallback to using the latest cache if no exact match is found - - v1-dependencies-py<< parameters.pyversion >>- - run: name: Install Dependencies @@ -121,7 +119,97 @@ jobs: . venv/bin/activate pre-commit run --hook-stage manual markdown-link-check --all-files + protobuf_generation_check: + docker: + - image: circleci/python:3.7.3 + working_directory: ~/repo/ + + steps: + - checkout + - run: + name: Combine proto files for caching + command: cat protobuf-definitions/proto/mlagents/envs/communicator_objects/*.proto > /tmp/proto_deps.txt + + - restore_cache: + keys: + - v1-protobuf-gen-dependencies-{{ checksum "/tmp/proto_deps.txt" }} + - v1-protobuf-gen-dependencies- + + - run: + name: Install Dependencies + command: | + sudo apt-get install nuget + nuget install Grpc.Tools -Version 1.14.1 -OutputDirectory protobuf-definitions/ + python3 -m venv venv + . venv/bin/activate + pip install --upgrade pip + pip install grpcio-tools==1.13.0 --progress-bar=off + pip install mypy-protobuf==1.16.0 --progress-bar=off + - save_cache: + paths: + - ./venv + key: v1-protobuf-gen-dependencies-{{ checksum "/tmp/proto_deps.txt" }} + + - run: + name: Generate protobufs + command: | + . 
venv/bin/activate + cd protobuf-definitions + chmod +x Grpc.Tools.1.14.1/tools/linux_x64/protoc + chmod +x Grpc.Tools.1.14.1/tools/linux_x64/grpc_csharp_plugin + COMPILER=Grpc.Tools.1.14.1/tools/linux_x64 ./make.sh + CS_PROTO_PATH=UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects + git diff --exit-code --quiet -- :/ ":(exclude,top)$CS_PROTO_PATH/*.meta" \ + || { GIT_ERR=$?; echo "protobufs need to be regenerated, apply the patch uploaded to artifacts."; \ + echo "Apply the patch with the command: git apply proto.patch"; \ + git diff -- :/ ":(exclude,top)$CS_PROTO_PATH/*.meta" > /tmp/proto.patch; \ + exit $GIT_ERR; } + - store_artifacts: + path: /tmp/proto.patch + destination: proto.patch + + deploy: + parameters: + directory: + type: string + description: Local directory to use for publishing (e.g. ml-agents) + docker: + - image: circleci/python:3.6 + steps: + - checkout + - run: + name: install python dependencies + command: | + python3 -m venv venv + . venv/bin/activate + pip install --upgrade pip + pip install setuptools wheel twine + - run: + name: verify git tag vs. version + command: | + python3 -m venv venv + . venv/bin/activate + cd << parameters.directory >> + python setup.py verify + - run: + name: create packages + command: | + . venv/bin/activate + cd << parameters.directory >> + python setup.py sdist + python setup.py bdist_wheel + - run: + name: upload to pypi + # To upload to test, just add the following flag to twine upload: + # --repository-url https://test.pypi.org/legacy/ + # and change the username to "mlagents-test" + command: | + . 
venv/bin/activate + cd << parameters.directory >> + twine upload -u mlagents -p $PYPI_PASSWORD dist/* + workflows: + version: 2 workflow: jobs: - build_python: @@ -137,3 +225,28 @@ workflows: # Test python 3.7 with the newest supported versions pip_constraints: test_constraints_max_version.txt - markdown_link_check + - protobuf_generation_check + - deploy: + name: deploy ml-agents-envs + directory: ml-agents-envs + filters: + tags: + only: /[0-9]+(\.[0-9]+)*(\.dev[0-9]+)*/ + branches: + ignore: /.*/ + - deploy: + name: deploy ml-agents + directory: ml-agents + filters: + tags: + only: /[0-9]+(\.[0-9]+)*(\.dev[0-9]+)*/ + branches: + ignore: /.*/ + - deploy: + name: deploy gym-unity + directory: gym-unity + filters: + tags: + only: /[0-9]+(\.[0-9]+)*(\.dev[0-9]+)*/ + branches: + ignore: /.*/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6aec5d4253..b5c74e5d09 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,6 +3,12 @@ repos: rev: 19.3b0 hooks: - id: black + exclude: > + (?x)^( + .*_pb2.py| + .*_pb2_grpc.py + )$ + - repo: https://github.com/pre-commit/mirrors-mypy rev: v0.720 hooks: @@ -20,6 +26,7 @@ repos: name: mypy-gym-unity files: "gym-unity/.*" args: [--ignore-missing-imports, --disallow-incomplete-defs] + - repo: https://github.com/pre-commit/pre-commit-hooks rev: v2.2.3 hooks: @@ -27,7 +34,8 @@ repos: exclude: > (?x)^( .*cs.meta| - .*.css + .*.css| + .*.meta )$ args: [--fix=lf] - id: flake8 @@ -36,6 +44,16 @@ repos: .*_pb2.py| .*_pb2_grpc.py )$ + additional_dependencies: [flake8-comprehensions] + - id: trailing-whitespace + name: trailing-whitespace-markdown + types: [markdown] + +- repo: https://github.com/pre-commit/pygrep-hooks + rev: v1.4.1 # Use the ref you want to point at + hooks: + - id: python-check-mock-methods + # "Local" hooks, see https://pre-commit.com/#repository-local-hooks - repo: local hooks: @@ -51,3 +69,8 @@ repos: exclude: ".*localized.*" # Only run manually, e.g. 
pre-commit run --hook-stage manual markdown-link-check stages: [manual] + - id: validate-versions + name: validate library versions + language: script + entry: utils/validate_versions.py + files: ".*/setup.py" diff --git a/.yamato/csharp-tests.yml b/.yamato/csharp-tests.yml new file mode 100644 index 0000000000..c34d3b7c00 --- /dev/null +++ b/.yamato/csharp-tests.yml @@ -0,0 +1,30 @@ +test_editors: + - version: 2017.4 + - version: 2018.4 + - version: 2019.3 +--- +{% for editor in test_editors %} +test_mac_editmode_{{ editor.version }}: + name: Test Mac EditMode {{ editor.version }} + agent: + type: Unity::VM::osx + image: ml-agents/ml-agents-bokken-mac:release + flavor: i1.small + variables: + UNITY_VERSION: {{ editor.version }} + commands: + - ./run-tests-editmode-osx-editor.sh + triggers: + branches: + only: + - "/develop-.*/" + targets: + only: + - "develop" + pull_requests: + - targets: + only: + - "master" + - "/release-.*/" + - "/hotfix-.*/" +{% endfor %} diff --git a/.yamato/standalone-build-test.yml b/.yamato/standalone-build-test.yml new file mode 100644 index 0000000000..e2a9147eb2 --- /dev/null +++ b/.yamato/standalone-build-test.yml @@ -0,0 +1,30 @@ +test_editors: + - version: 2017.4 + - version: 2018.4 + - version: 2019.3 +--- +{% for editor in test_editors %} +test_mac_standalone_{{ editor.version }}: + name: Test Mac Standalone {{ editor.version }} + agent: + type: Unity::VM::osx + image: ml-agents/ml-agents-bokken-mac:release + flavor: i1.small + variables: + UNITY_VERSION: {{ editor.version }} + commands: + - ./run-standalone-build-osx.sh + triggers: + branches: + only: + - "/develop-.*/" + targets: + only: + - "develop" + pull_requests: + - targets: + only: + - "master" + - "/release-.*/" + - "/hotfix-.*/" +{% endfor %} \ No newline at end of file diff --git a/README.md b/README.md index 0282f8f651..99edd0fd26 100644 --- a/README.md +++ b/README.md @@ -29,7 +29,6 @@ developer communities. 
* Support for multiple environment configurations and training scenarios * Train memory-enhanced agents using deep reinforcement learning * Easily definable Curriculum Learning and Generalization scenarios -* Broadcasting of agent behavior for supervised learning * Built-in support for Imitation Learning * Flexible agent control with On Demand Decision Making * Visualizing network outputs within the environment diff --git a/SURVEY.md b/SURVEY.md index a228d866fe..9c36726d2f 100644 --- a/SURVEY.md +++ b/SURVEY.md @@ -1,5 +1,5 @@ # Unity ML-Agents Toolkit Survey -Your opinion matters a great deal to us. Only by hearing your thoughts on the Unity ML-Agents Toolkit can we continue to improve and grow. Please take a few minutes to let us know about it. +Your opinion matters a great deal to us. Only by hearing your thoughts on the Unity ML-Agents Toolkit can we continue to improve and grow. Please take a few minutes to let us know about it. -[Fill out the survey](https://goo.gl/forms/qFMYSYr5TlINvG6f1) \ No newline at end of file +[Fill out the survey](https://goo.gl/forms/qFMYSYr5TlINvG6f1) \ No newline at end of file diff --git a/UnitySDK/Assets/Gizmos/HeuristicBrain Icon.png b/UnitySDK/Assets/Gizmos/HeuristicBrain Icon.png deleted file mode 100644 index cbf6e0344f..0000000000 Binary files a/UnitySDK/Assets/Gizmos/HeuristicBrain Icon.png and /dev/null differ diff --git a/UnitySDK/Assets/Gizmos/HeuristicBrain Icon.png.meta b/UnitySDK/Assets/Gizmos/HeuristicBrain Icon.png.meta deleted file mode 100644 index 9dcb3c7845..0000000000 --- a/UnitySDK/Assets/Gizmos/HeuristicBrain Icon.png.meta +++ /dev/null @@ -1,76 +0,0 @@ -fileFormatVersion: 2 -guid: fbee1fddc738f410ea0b37fe6301d7c0 -TextureImporter: - fileIDToRecycleName: {} - externalObjects: {} - serializedVersion: 4 - mipmaps: - mipMapMode: 0 - enableMipMap: 1 - sRGBTexture: 1 - linearTexture: 0 - fadeOut: 0 - borderMipMap: 0 - mipMapsPreserveCoverage: 0 - alphaTestReferenceValue: 0.5 - mipMapFadeDistanceStart: 1 - 
mipMapFadeDistanceEnd: 3 - bumpmap: - convertToNormalMap: 0 - externalNormalMap: 0 - heightScale: 0.25 - normalMapFilter: 0 - isReadable: 0 - grayScaleToAlpha: 0 - generateCubemap: 6 - cubemapConvolution: 0 - seamlessCubemap: 0 - textureFormat: 1 - maxTextureSize: 2048 - textureSettings: - serializedVersion: 2 - filterMode: -1 - aniso: -1 - mipBias: -1 - wrapU: -1 - wrapV: -1 - wrapW: -1 - nPOTScale: 1 - lightmap: 0 - compressionQuality: 50 - spriteMode: 0 - spriteExtrude: 1 - spriteMeshType: 1 - alignment: 0 - spritePivot: {x: 0.5, y: 0.5} - spritePixelsToUnits: 100 - spriteBorder: {x: 0, y: 0, z: 0, w: 0} - spriteGenerateFallbackPhysicsShape: 1 - alphaUsage: 1 - alphaIsTransparency: 0 - spriteTessellationDetail: -1 - textureType: 0 - textureShape: 1 - maxTextureSizeSet: 0 - compressionQualitySet: 0 - textureFormatSet: 0 - platformSettings: - - buildTarget: DefaultTexturePlatform - maxTextureSize: 2048 - resizeAlgorithm: 0 - textureFormat: -1 - textureCompression: 1 - compressionQuality: 50 - crunchedCompression: 0 - allowsAlphaSplitting: 0 - overridden: 0 - androidETC2FallbackOverride: 0 - spriteSheet: - serializedVersion: 2 - sprites: [] - outline: [] - physicsShape: [] - spritePackingTag: - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/Gizmos/LearningBrain Icon.png b/UnitySDK/Assets/Gizmos/LearningBrain Icon.png deleted file mode 100644 index 183c028eb7..0000000000 Binary files a/UnitySDK/Assets/Gizmos/LearningBrain Icon.png and /dev/null differ diff --git a/UnitySDK/Assets/Gizmos/LearningBrain Icon.png.meta b/UnitySDK/Assets/Gizmos/LearningBrain Icon.png.meta deleted file mode 100644 index e70fcf538a..0000000000 --- a/UnitySDK/Assets/Gizmos/LearningBrain Icon.png.meta +++ /dev/null @@ -1,76 +0,0 @@ -fileFormatVersion: 2 -guid: aff60a06d6c704801a2d09a8e72cbefc -TextureImporter: - fileIDToRecycleName: {} - externalObjects: {} - serializedVersion: 4 - mipmaps: - mipMapMode: 0 - enableMipMap: 1 - sRGBTexture: 1 - linearTexture: 0 
- fadeOut: 0 - borderMipMap: 0 - mipMapsPreserveCoverage: 0 - alphaTestReferenceValue: 0.5 - mipMapFadeDistanceStart: 1 - mipMapFadeDistanceEnd: 3 - bumpmap: - convertToNormalMap: 0 - externalNormalMap: 0 - heightScale: 0.25 - normalMapFilter: 0 - isReadable: 0 - grayScaleToAlpha: 0 - generateCubemap: 6 - cubemapConvolution: 0 - seamlessCubemap: 0 - textureFormat: 1 - maxTextureSize: 2048 - textureSettings: - serializedVersion: 2 - filterMode: -1 - aniso: -1 - mipBias: -1 - wrapU: -1 - wrapV: -1 - wrapW: -1 - nPOTScale: 1 - lightmap: 0 - compressionQuality: 50 - spriteMode: 0 - spriteExtrude: 1 - spriteMeshType: 1 - alignment: 0 - spritePivot: {x: 0.5, y: 0.5} - spritePixelsToUnits: 100 - spriteBorder: {x: 0, y: 0, z: 0, w: 0} - spriteGenerateFallbackPhysicsShape: 1 - alphaUsage: 1 - alphaIsTransparency: 0 - spriteTessellationDetail: -1 - textureType: 0 - textureShape: 1 - maxTextureSizeSet: 0 - compressionQualitySet: 0 - textureFormatSet: 0 - platformSettings: - - buildTarget: DefaultTexturePlatform - maxTextureSize: 2048 - resizeAlgorithm: 0 - textureFormat: -1 - textureCompression: 1 - compressionQuality: 50 - crunchedCompression: 0 - allowsAlphaSplitting: 0 - overridden: 0 - androidETC2FallbackOverride: 0 - spriteSheet: - serializedVersion: 2 - sprites: [] - outline: [] - physicsShape: [] - spritePackingTag: - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/Gizmos/PlayerBrain Icon.png b/UnitySDK/Assets/Gizmos/PlayerBrain Icon.png deleted file mode 100644 index 28897181d9..0000000000 Binary files a/UnitySDK/Assets/Gizmos/PlayerBrain Icon.png and /dev/null differ diff --git a/UnitySDK/Assets/Gizmos/PlayerBrain Icon.png.meta b/UnitySDK/Assets/Gizmos/PlayerBrain Icon.png.meta deleted file mode 100644 index 705dc8c6b1..0000000000 --- a/UnitySDK/Assets/Gizmos/PlayerBrain Icon.png.meta +++ /dev/null @@ -1,76 +0,0 @@ -fileFormatVersion: 2 -guid: 224cd0666bb7b4477b0806fd29cf5966 -TextureImporter: - fileIDToRecycleName: {} - 
externalObjects: {} - serializedVersion: 4 - mipmaps: - mipMapMode: 0 - enableMipMap: 1 - sRGBTexture: 1 - linearTexture: 0 - fadeOut: 0 - borderMipMap: 0 - mipMapsPreserveCoverage: 0 - alphaTestReferenceValue: 0.5 - mipMapFadeDistanceStart: 1 - mipMapFadeDistanceEnd: 3 - bumpmap: - convertToNormalMap: 0 - externalNormalMap: 0 - heightScale: 0.25 - normalMapFilter: 0 - isReadable: 0 - grayScaleToAlpha: 0 - generateCubemap: 6 - cubemapConvolution: 0 - seamlessCubemap: 0 - textureFormat: 1 - maxTextureSize: 2048 - textureSettings: - serializedVersion: 2 - filterMode: -1 - aniso: -1 - mipBias: -1 - wrapU: -1 - wrapV: -1 - wrapW: -1 - nPOTScale: 1 - lightmap: 0 - compressionQuality: 50 - spriteMode: 0 - spriteExtrude: 1 - spriteMeshType: 1 - alignment: 0 - spritePivot: {x: 0.5, y: 0.5} - spritePixelsToUnits: 100 - spriteBorder: {x: 0, y: 0, z: 0, w: 0} - spriteGenerateFallbackPhysicsShape: 1 - alphaUsage: 1 - alphaIsTransparency: 0 - spriteTessellationDetail: -1 - textureType: 0 - textureShape: 1 - maxTextureSizeSet: 0 - compressionQualitySet: 0 - textureFormatSet: 0 - platformSettings: - - buildTarget: DefaultTexturePlatform - maxTextureSize: 2048 - resizeAlgorithm: 0 - textureFormat: -1 - textureCompression: 1 - compressionQuality: 50 - crunchedCompression: 0 - allowsAlphaSplitting: 0 - overridden: 0 - androidETC2FallbackOverride: 0 - spriteSheet: - serializedVersion: 2 - sprites: [] - outline: [] - physicsShape: [] - spritePackingTag: - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Editor/AgentEditor.cs b/UnitySDK/Assets/ML-Agents/Editor/AgentEditor.cs index a59d1e8c23..a58fc7035f 100644 --- a/UnitySDK/Assets/ML-Agents/Editor/AgentEditor.cs +++ b/UnitySDK/Assets/ML-Agents/Editor/AgentEditor.cs @@ -1,12 +1,12 @@ using UnityEngine; using UnityEditor; +using Barracuda; namespace MLAgents { -/* - This code is meant to modify the behavior of the inspector on Brain Components. 
- Depending on the type of brain that is used, the available fields will be modified in the inspector accordingly. -*/ + /* + This code is meant to modify the behavior of the inspector on Agent Components. + */ [CustomEditor(typeof(Agent), true)] [CanEditMultipleObjects] public class AgentEditor : Editor @@ -16,7 +16,6 @@ public override void OnInspectorGUI() var serializedAgent = serializedObject; serializedAgent.Update(); - var brain = serializedAgent.FindProperty("brain"); var actionsPerDecision = serializedAgent.FindProperty( "agentParameters.numberOfActionsBetweenDecisions"); var maxSteps = serializedAgent.FindProperty( @@ -25,59 +24,7 @@ public override void OnInspectorGUI() "agentParameters.resetOnDone"); var isOdd = serializedAgent.FindProperty( "agentParameters.onDemandDecision"); - var cameras = serializedAgent.FindProperty( - "agentParameters.agentCameras"); - var renderTextures = serializedAgent.FindProperty( - "agentParameters.agentRenderTextures"); - EditorGUILayout.PropertyField(brain); - - if (cameras.arraySize > 0 && renderTextures.arraySize > 0) - { - EditorGUILayout.HelpBox("Brain visual observations created by first getting all cameras then all render textures.", MessageType.Info); - } - - EditorGUILayout.LabelField("Agent Cameras"); - for (var i = 0; i < cameras.arraySize; i++) - { - EditorGUILayout.PropertyField( - cameras.GetArrayElementAtIndex(i), - new GUIContent("Camera " + (i + 1) + ": ")); - } - - EditorGUILayout.BeginHorizontal(); - if (GUILayout.Button("Add Camera", EditorStyles.miniButton)) - { - cameras.arraySize++; - } - - if (GUILayout.Button("Remove Camera", EditorStyles.miniButton)) - { - cameras.arraySize--; - } - - EditorGUILayout.EndHorizontal(); - - EditorGUILayout.LabelField("Agent RenderTextures"); - for (var i = 0; i < renderTextures.arraySize; i++) - { - EditorGUILayout.PropertyField( - renderTextures.GetArrayElementAtIndex(i), - new GUIContent("RenderTexture " + (i + 1) + ": ")); - } - - 
EditorGUILayout.BeginHorizontal(); - if (GUILayout.Button("Add RenderTextures", EditorStyles.miniButton)) - { - renderTextures.arraySize++; - } - - if (GUILayout.Button("Remove RenderTextures", EditorStyles.miniButton)) - { - renderTextures.arraySize--; - } - - EditorGUILayout.EndHorizontal(); EditorGUILayout.PropertyField( diff --git a/UnitySDK/Assets/ML-Agents/Editor/BehaviorParametersEditor.cs b/UnitySDK/Assets/ML-Agents/Editor/BehaviorParametersEditor.cs new file mode 100644 index 0000000000..d08e085059 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Editor/BehaviorParametersEditor.cs @@ -0,0 +1,78 @@ +using UnityEngine; +using UnityEditor; +using Barracuda; + +namespace MLAgents +{ + /* + This code is meant to modify the behavior of the inspector on Agent Components. + */ + [CustomEditor(typeof(BehaviorParameters))] + [CanEditMultipleObjects] + public class BehaviorParametersEditor : Editor + { + private const float k_TimeBetweenModelReloads = 2f; + // Time since the last reload of the model + private float m_TimeSinceModelReload; + // Whether or not the model needs to be reloaded + private bool m_RequireReload; + + public override void OnInspectorGUI() + { + var serializedObject = base.serializedObject; + serializedObject.Update(); + + // Drawing the Behavior Parameters + EditorGUI.BeginChangeCheck(); + EditorGUI.indentLevel++; + EditorGUILayout.PropertyField(serializedObject.FindProperty("m_BehaviorName")); + EditorGUILayout.PropertyField(serializedObject.FindProperty("m_BrainParameters"), true); + EditorGUILayout.PropertyField(serializedObject.FindProperty("m_Model"), true); + EditorGUI.indentLevel++; + EditorGUILayout.PropertyField(serializedObject.FindProperty("m_InferenceDevice"), true); + EditorGUI.indentLevel--; + EditorGUILayout.PropertyField(serializedObject.FindProperty("m_UseHeuristic")); + // EditorGUILayout.PropertyField(serializedObject.FindProperty("m_Heuristic"), true); + EditorGUI.indentLevel--; + if (EditorGUI.EndChangeCheck()) + { + 
m_RequireReload = true; + } + DisplayFailedModelChecks(); + serializedObject.ApplyModifiedProperties(); + } + + /// + /// Must be called within OnEditorGUI() + /// + private void DisplayFailedModelChecks() + { + if (m_RequireReload && m_TimeSinceModelReload > k_TimeBetweenModelReloads) + { + m_RequireReload = false; + m_TimeSinceModelReload = 0; + } + // Display all failed checks + D.logEnabled = false; + Model barracudaModel = null; + var model = (NNModel)serializedObject.FindProperty("m_Model").objectReferenceValue; + var brainParameters = ((BehaviorParameters)target).brainParameters; + if (model != null) + { + barracudaModel = ModelLoader.Load(model.Value); + } + if (brainParameters != null) + { + var failedChecks = InferenceBrain.BarracudaModelParamLoader.CheckModel( + barracudaModel, brainParameters); + foreach (var check in failedChecks) + { + if (check != null) + { + EditorGUILayout.HelpBox(check, MessageType.Warning); + } + } + } + } + } +} diff --git a/UnitySDK/Assets/ML-Agents/Scripts/BCTeacherHelper.cs.meta b/UnitySDK/Assets/ML-Agents/Editor/BehaviorParametersEditor.cs.meta similarity index 83% rename from UnitySDK/Assets/ML-Agents/Scripts/BCTeacherHelper.cs.meta rename to UnitySDK/Assets/ML-Agents/Editor/BehaviorParametersEditor.cs.meta index 909b7060b1..6eb612f3e3 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/BCTeacherHelper.cs.meta +++ b/UnitySDK/Assets/ML-Agents/Editor/BehaviorParametersEditor.cs.meta @@ -1,5 +1,5 @@ fileFormatVersion: 2 -guid: d1cf16abc39fb4d6ca81222fc73d1bb5 +guid: 72b0b21a2d4ee4bc2be0530fd134720d MonoImporter: externalObjects: {} serializedVersion: 2 diff --git a/UnitySDK/Assets/ML-Agents/Editor/BrainEditor.cs b/UnitySDK/Assets/ML-Agents/Editor/BrainEditor.cs deleted file mode 100644 index 6d10bfe645..0000000000 --- a/UnitySDK/Assets/ML-Agents/Editor/BrainEditor.cs +++ /dev/null @@ -1,36 +0,0 @@ -using UnityEngine; -using UnityEditor; - - -namespace MLAgents -{ - /// - /// CustomEditor for the Brain base class. 
Defines the default Inspector view for a Brain. - /// Shows the BrainParameters of the Brain and expose a tool to deep copy BrainParameters - /// between brains. - /// - [CustomEditor(typeof(Brain))] - public class BrainEditor : Editor - { - public override void OnInspectorGUI() - { - var brain = (Brain)target; - var brainToCopy = EditorGUILayout.ObjectField( - "Copy Brain Parameters from : ", null, typeof(Brain), false) as Brain; - if (brainToCopy != null) - { - brain.brainParameters = brainToCopy.brainParameters.Clone(); - EditorUtility.SetDirty(brain); - AssetDatabase.SaveAssets(); - return; - } - var serializedBrain = serializedObject; - serializedBrain.Update(); - EditorGUILayout.PropertyField(serializedBrain.FindProperty("brainParameters"), true); - serializedBrain.ApplyModifiedProperties(); - - // Draws a horizontal thick line - EditorGUILayout.LabelField("", GUI.skin.horizontalSlider); - } - } -} diff --git a/UnitySDK/Assets/ML-Agents/Editor/BrainEditor.cs.meta b/UnitySDK/Assets/ML-Agents/Editor/BrainEditor.cs.meta deleted file mode 100644 index c7eaf3f0c7..0000000000 --- a/UnitySDK/Assets/ML-Agents/Editor/BrainEditor.cs.meta +++ /dev/null @@ -1,3 +0,0 @@ -fileFormatVersion: 2 -guid: 7b07bebd03994ed08559c725da882b62 -timeCreated: 1537834304 \ No newline at end of file diff --git a/UnitySDK/Assets/ML-Agents/Editor/BrainParametersDrawer.cs b/UnitySDK/Assets/ML-Agents/Editor/BrainParametersDrawer.cs index 39c3c5e238..3890d69509 100644 --- a/UnitySDK/Assets/ML-Agents/Editor/BrainParametersDrawer.cs +++ b/UnitySDK/Assets/ML-Agents/Editor/BrainParametersDrawer.cs @@ -13,63 +13,39 @@ public class BrainParametersDrawer : PropertyDrawer // The height of a line in the Unity Inspectors private const float k_LineHeight = 17f; private const int k_VecObsNumLine = 3; - private const string k_CamResPropName = "cameraResolutions"; private const string k_ActionSizePropName = "vectorActionSize"; private const string k_ActionTypePropName = "vectorActionSpaceType"; private 
const string k_ActionDescriptionPropName = "vectorActionDescriptions"; private const string k_VecObsPropName = "vectorObservationSize"; private const string k_NumVecObsPropName = "numStackedVectorObservations"; - private const string k_CamWidthPropName = "width"; - private const string k_CamHeightPropName = "height"; - private const string k_CamGrayPropName = "blackAndWhite"; - private const int k_DefaultCameraWidth = 84; - private const int k_DefaultCameraHeight = 84; - private const bool k_DefaultCameraGray = false; /// public override float GetPropertyHeight(SerializedProperty property, GUIContent label) { - if (property.isExpanded) - { - return k_LineHeight + - GetHeightDrawVectorObservation() + - GetHeightDrawVisualObservation(property) + - GetHeightDrawVectorAction(property) + - GetHeightDrawVectorActionDescriptions(property); - } - return k_LineHeight; + return GetHeightDrawVectorObservation() + + GetHeightDrawVectorAction(property); + } /// public override void OnGUI(Rect position, SerializedProperty property, GUIContent label) { + var indent = EditorGUI.indentLevel; EditorGUI.indentLevel = 0; position.height = k_LineHeight; - property.isExpanded = EditorGUI.Foldout(position, property.isExpanded, label); - position.y += k_LineHeight; - if (property.isExpanded) - { - EditorGUI.BeginProperty(position, label, property); - EditorGUI.indentLevel++; - - // Vector Observations - DrawVectorObservation(position, property); - position.y += GetHeightDrawVectorObservation(); + EditorGUI.BeginProperty(position, label, property); + EditorGUI.indentLevel++; - //Visual Observations - DrawVisualObservations(position, property); - position.y += GetHeightDrawVisualObservation(property); + // Vector Observations + DrawVectorObservation(position, property); + position.y += GetHeightDrawVectorObservation(); - // Vector Action - DrawVectorAction(position, property); - position.y += GetHeightDrawVectorAction(property); + // Vector Action + DrawVectorAction(position, property); + 
position.y += GetHeightDrawVectorAction(property); - // Vector Action Descriptions - DrawVectorActionDescriptions(position, property); - position.y += GetHeightDrawVectorActionDescriptions(property); - EditorGUI.EndProperty(); - } + EditorGUI.EndProperty(); EditorGUI.indentLevel = indent; } @@ -111,107 +87,6 @@ private static float GetHeightDrawVectorObservation() return k_VecObsNumLine * k_LineHeight; } - /// - /// Draws the Visual Observations parameters for the Brain Parameters - /// - /// Rectangle on the screen to use for the property GUI. - /// The SerializedProperty of the BrainParameters - /// to make the custom GUI for. - private static void DrawVisualObservations(Rect position, SerializedProperty property) - { - EditorGUI.LabelField(position, "Visual Observations"); - position.y += k_LineHeight; - var quarter = position.width / 4; - var resolutions = property.FindPropertyRelative(k_CamResPropName); - DrawVisualObsButtons(position, resolutions); - position.y += k_LineHeight; - - // Display the labels for the columns : Index, Width, Height and Gray - var indexRect = new Rect(position.x, position.y, quarter, position.height); - var widthRect = new Rect(position.x + quarter, position.y, quarter, position.height); - var heightRect = new Rect(position.x + 2 * quarter, position.y, quarter, position.height); - var bwRect = new Rect(position.x + 3 * quarter, position.y, quarter, position.height); - EditorGUI.indentLevel++; - if (resolutions.arraySize > 0) - { - EditorGUI.LabelField(indexRect, "Index"); - indexRect.y += k_LineHeight; - EditorGUI.LabelField(widthRect, "Width"); - widthRect.y += k_LineHeight; - EditorGUI.LabelField(heightRect, "Height"); - heightRect.y += k_LineHeight; - EditorGUI.LabelField(bwRect, "Gray"); - bwRect.y += k_LineHeight; - } - - // Iterate over the resolutions - for (var i = 0; i < resolutions.arraySize; i++) - { - EditorGUI.LabelField(indexRect, "Obs " + i); - indexRect.y += k_LineHeight; - var res = 
resolutions.GetArrayElementAtIndex(i); - var w = res.FindPropertyRelative("width"); - w.intValue = EditorGUI.IntField(widthRect, w.intValue); - widthRect.y += k_LineHeight; - var h = res.FindPropertyRelative("height"); - h.intValue = EditorGUI.IntField(heightRect, h.intValue); - heightRect.y += k_LineHeight; - var bw = res.FindPropertyRelative("blackAndWhite"); - bw.boolValue = EditorGUI.Toggle(bwRect, bw.boolValue); - bwRect.y += k_LineHeight; - } - EditorGUI.indentLevel--; - } - - /// - /// Draws the buttons to add and remove the visual observations parameters - /// - /// Rectangle on the screen to use for the property GUI. - /// The SerializedProperty of the resolution array - /// to make the custom GUI for. - private static void DrawVisualObsButtons(Rect position, SerializedProperty resolutions) - { - var widthEighth = position.width / 8; - var addButtonRect = new Rect(position.x + widthEighth, position.y, - 3 * widthEighth, position.height); - var removeButtonRect = new Rect(position.x + 4 * widthEighth, position.y, - 3 * widthEighth, position.height); - if (resolutions.arraySize == 0) - { - addButtonRect.width *= 2; - } - // Display the buttons - if (GUI.Button(addButtonRect, "Add New", EditorStyles.miniButton)) - { - resolutions.arraySize += 1; - var newRes = resolutions.GetArrayElementAtIndex(resolutions.arraySize - 1); - newRes.FindPropertyRelative(k_CamWidthPropName).intValue = k_DefaultCameraWidth; - newRes.FindPropertyRelative(k_CamHeightPropName).intValue = k_DefaultCameraHeight; - newRes.FindPropertyRelative(k_CamGrayPropName).boolValue = k_DefaultCameraGray; - } - if (resolutions.arraySize > 0) - { - if (GUI.Button(removeButtonRect, "Remove Last", EditorStyles.miniButton)) - { - resolutions.arraySize -= 1; - } - } - } - - /// - /// The Height required to draw the Visual Observations parameters - /// - /// The height of the drawer of the Visual Observations - private static float GetHeightDrawVisualObservation(SerializedProperty property) - { - var 
visObsSize = property.FindPropertyRelative(k_CamResPropName).arraySize + 2; - if (property.FindPropertyRelative(k_CamResPropName).arraySize > 0) - { - visObsSize += 1; - } - return k_LineHeight * visObsSize; - } - /// /// Draws the Vector Actions parameters for the Brain Parameters /// @@ -302,68 +177,5 @@ private static float GetHeightDrawVectorAction(SerializedProperty property) } return actionSize * k_LineHeight; } - - /// - /// Draws the Vector Actions descriptions for the Brain Parameters - /// - /// Rectangle on the screen to use for the property GUI. - /// The SerializedProperty of the BrainParameters - /// to make the custom GUI for. - private static void DrawVectorActionDescriptions(Rect position, SerializedProperty property) - { - var bpVectorActionType = property.FindPropertyRelative(k_ActionTypePropName); - var vecActionSize = property.FindPropertyRelative(k_ActionSizePropName); - var numberOfDescriptions = 0; - if (bpVectorActionType.enumValueIndex == 1) - { - numberOfDescriptions = vecActionSize.GetArrayElementAtIndex(0).intValue; - } - else - { - numberOfDescriptions = vecActionSize.arraySize; - } - - EditorGUI.indentLevel++; - var vecActionDescriptions = - property.FindPropertyRelative(k_ActionDescriptionPropName); - vecActionDescriptions.arraySize = numberOfDescriptions; - if (bpVectorActionType.enumValueIndex == 1) - { - //Continuous case : - EditorGUI.PropertyField( - position, - vecActionDescriptions, - new GUIContent("Action Descriptions", - "A list of strings used to name the available actionsm for the Brain."), - true); - position.y += k_LineHeight; - } - else - { - // Discrete case : - EditorGUI.PropertyField( - position, - vecActionDescriptions, - new GUIContent("Branch Descriptions", - "A list of strings used to name the available branches for the Brain."), - true); - position.y += k_LineHeight; - } - } - - /// - /// The Height required to draw the Action Descriptions - /// - /// The height of the drawer of the Action Descriptions - 
private static float GetHeightDrawVectorActionDescriptions(SerializedProperty property) - { - var descriptionSize = 1; - if (property.FindPropertyRelative(k_ActionDescriptionPropName).isExpanded) - { - var descriptions = property.FindPropertyRelative(k_ActionDescriptionPropName); - descriptionSize += descriptions.arraySize + 1; - } - return descriptionSize * k_LineHeight; - } } } diff --git a/UnitySDK/Assets/ML-Agents/Editor/BroadcastHubDrawer.cs b/UnitySDK/Assets/ML-Agents/Editor/BroadcastHubDrawer.cs deleted file mode 100644 index 453ad31087..0000000000 --- a/UnitySDK/Assets/ML-Agents/Editor/BroadcastHubDrawer.cs +++ /dev/null @@ -1,207 +0,0 @@ -using UnityEngine; -using UnityEditor; -using System; -using UnityEditor.SceneManagement; -using UnityEngine.SceneManagement; - -namespace MLAgents -{ - /// - /// PropertyDrawer for BroadcastHub. Used to display the BroadcastHub in the Inspector. - /// - [CustomPropertyDrawer(typeof(BroadcastHub))] - public class BroadcastHubDrawer : PropertyDrawer - { - private BroadcastHub m_Hub; - // The height of a line in the Unity Inspectors - private const float k_LineHeight = 17f; - // The vertical space left below the BroadcastHub UI. - private const float k_ExtraSpaceBelow = 10f; - // The horizontal size of the Control checkbox - private const int k_ControlSize = 80; - - /// - /// Computes the height of the Drawer depending on the property it is showing - /// - /// The property that is being drawn. - /// The label of the property being drawn. - /// The vertical space needed to draw the property. - public override float GetPropertyHeight(SerializedProperty property, GUIContent label) - { - LazyInitializeHub(property); - var numLines = m_Hub.Count + 2 + (m_Hub.Count > 0 ? 
1 : 0); - return (numLines) * k_LineHeight + k_ExtraSpaceBelow; - } - - /// - public override void OnGUI(Rect position, SerializedProperty property, GUIContent label) - { - LazyInitializeHub(property); - position.height = k_LineHeight; - EditorGUI.LabelField(position, new GUIContent(label.text, - "The Broadcast Hub helps you define which Brains you want to expose to " + - "the external process")); - position.y += k_LineHeight; - - EditorGUI.BeginProperty(position, label, property); - - EditorGUI.indentLevel++; - DrawAddRemoveButtons(position); - position.y += k_LineHeight; - - // This is the labels for each columns - var brainWidth = position.width - k_ControlSize; - var brainRect = new Rect( - position.x, position.y, brainWidth, position.height); - var controlRect = new Rect( - position.x + brainWidth, position.y, k_ControlSize, position.height); - if (m_Hub.Count > 0) - { - EditorGUI.LabelField(brainRect, "Brains"); - brainRect.y += k_LineHeight; - EditorGUI.LabelField(controlRect, "Control"); - controlRect.y += k_LineHeight; - controlRect.x += 15; - } - DrawBrains(brainRect, controlRect); - EditorGUI.indentLevel--; - EditorGUI.EndProperty(); - } - - /// - /// Draws the Add and Remove buttons. - /// - /// The position at which to draw. 
- private void DrawAddRemoveButtons(Rect position) - { - // This is the rectangle for the Add button - var addButtonRect = position; - addButtonRect.x += 20; - if (m_Hub.Count > 0) - { - addButtonRect.width /= 2; - addButtonRect.width -= 24; - var buttonContent = new GUIContent( - "Add New", "Add a new Brain to the Broadcast Hub"); - if (GUI.Button(addButtonRect, buttonContent, EditorStyles.miniButton)) - { - MarkSceneAsDirty(); - AddBrain(); - } - // This is the rectangle for the Remove button - var removeButtonRect = position; - removeButtonRect.x = position.width / 2 + 15; - removeButtonRect.width = addButtonRect.width - 18; - buttonContent = new GUIContent( - "Remove Last", "Remove the last Brain from the Broadcast Hub"); - if (GUI.Button(removeButtonRect, buttonContent, EditorStyles.miniButton)) - { - MarkSceneAsDirty(); - RemoveLastBrain(); - } - } - else - { - addButtonRect.width -= 50; - var buttonContent = new GUIContent( - "Add Brain to Broadcast Hub", "Add a new Brain to the Broadcast Hub"); - if (GUI.Button(addButtonRect, buttonContent, EditorStyles.miniButton)) - { - MarkSceneAsDirty(); - AddBrain(); - } - } - } - - /// - /// Draws the Brain and Control checkbox for the brains contained in the BroadCastHub. - /// - /// The Rect to draw the Brains. - /// The Rect to draw the control checkbox. - private void DrawBrains(Rect brainRect, Rect controlRect) - { - for (var index = 0; index < m_Hub.Count; index++) - { - var exposedBrains = m_Hub.broadcastingBrains; - var brain = exposedBrains[index]; - // This is the rectangle for the brain - EditorGUI.BeginChangeCheck(); - var newBrain = EditorGUI.ObjectField( - brainRect, brain, typeof(Brain), true) as Brain; - brainRect.y += k_LineHeight; - if (EditorGUI.EndChangeCheck()) - { - MarkSceneAsDirty(); - m_Hub.broadcastingBrains.RemoveAt(index); - var brainToInsert = exposedBrains.Contains(newBrain) ? 
null : newBrain; - exposedBrains.Insert(index, brainToInsert); - break; - } - // This is the Rectangle for the control checkbox - EditorGUI.BeginChangeCheck(); - if (brain is LearningBrain) - { - var isTraining = m_Hub.IsControlled(brain); - isTraining = EditorGUI.Toggle(controlRect, isTraining); - m_Hub.SetControlled(brain, isTraining); - } - controlRect.y += k_LineHeight; - if (EditorGUI.EndChangeCheck()) - { - MarkSceneAsDirty(); - } - } - } - - /// - /// Lazy initializes the Drawer with the property to be drawn. - /// - /// The SerializedProperty of the BroadcastHub - /// to make the custom GUI for. - private void LazyInitializeHub(SerializedProperty property) - { - if (m_Hub != null) - { - return; - } - var target = property.serializedObject.targetObject; - m_Hub = fieldInfo.GetValue(target) as BroadcastHub; - if (m_Hub == null) - { - m_Hub = new BroadcastHub(); - fieldInfo.SetValue(target, m_Hub); - } - } - - /// - /// Signals that the property has been modified and requires the scene to be saved for - /// the changes to persist. Only works when the Editor is not playing. - /// - private static void MarkSceneAsDirty() - { - if (!EditorApplication.isPlaying) - { - EditorSceneManager.MarkSceneDirty(SceneManager.GetActiveScene()); - } - } - - /// - /// Removes the last Brain from the BroadcastHub - /// - private void RemoveLastBrain() - { - if (m_Hub.Count > 0) - { - m_Hub.broadcastingBrains.RemoveAt(m_Hub.broadcastingBrains.Count - 1); - } - } - - /// - /// Adds a new Brain to the BroadcastHub. The value of this brain will not be initialized. 
- /// - private void AddBrain() - { - m_Hub.broadcastingBrains.Add(null); - } - } -} diff --git a/UnitySDK/Assets/ML-Agents/Editor/BroadcastHubDrawer.cs.meta b/UnitySDK/Assets/ML-Agents/Editor/BroadcastHubDrawer.cs.meta deleted file mode 100644 index 7ab682eb57..0000000000 --- a/UnitySDK/Assets/ML-Agents/Editor/BroadcastHubDrawer.cs.meta +++ /dev/null @@ -1,3 +0,0 @@ -fileFormatVersion: 2 -guid: aa1bef9e5833447ab7251fc6f7a3a609 -timeCreated: 1536852419 \ No newline at end of file diff --git a/UnitySDK/Assets/ML-Agents/Editor/Builder.cs b/UnitySDK/Assets/ML-Agents/Editor/Builder.cs deleted file mode 100644 index 1f4626bcd0..0000000000 --- a/UnitySDK/Assets/ML-Agents/Editor/Builder.cs +++ /dev/null @@ -1,14 +0,0 @@ -#if UNITY_CLOUD_BUILD - -namespace MLAgents -{ - public static class Builder - { - public static void PreExport() - { - BuilderUtils.SwitchAllLearningBrainToControlMode(); - } - } -} - -#endif diff --git a/UnitySDK/Assets/ML-Agents/Editor/Builder.cs.meta b/UnitySDK/Assets/ML-Agents/Editor/Builder.cs.meta deleted file mode 100644 index f10468becd..0000000000 --- a/UnitySDK/Assets/ML-Agents/Editor/Builder.cs.meta +++ /dev/null @@ -1,3 +0,0 @@ -fileFormatVersion: 2 -guid: 41ad366a346f4fbdaeb7dc8fff9a5025 -timeCreated: 1555622970 \ No newline at end of file diff --git a/UnitySDK/Assets/ML-Agents/Editor/BuilderUtils.cs b/UnitySDK/Assets/ML-Agents/Editor/BuilderUtils.cs deleted file mode 100644 index e5e94b01f2..0000000000 --- a/UnitySDK/Assets/ML-Agents/Editor/BuilderUtils.cs +++ /dev/null @@ -1,44 +0,0 @@ -#if UNITY_CLOUD_BUILD - -using System.Linq; -using UnityEditor; -using UnityEditor.SceneManagement; -using UnityEngine; -using System.IO; - -namespace MLAgents -{ - public static class BuilderUtils - { - public static void SwitchAllLearningBrainToControlMode() - { - Debug.Log("The Switching to control mode function is triggered"); - string[] scenePaths = Directory.GetFiles("Assets/ML-Agents/Examples/", "*.unity", SearchOption.AllDirectories); - foreach 
(string scenePath in scenePaths) - { - var curScene = EditorSceneManager.OpenScene(scenePath); - var aca = SceneAsset.FindObjectOfType(); - if (aca != null) - { - var learningBrains = aca.broadcastHub.broadcastingBrains.Where( - x => x != null && x is LearningBrain); - foreach (Brain brain in learningBrains) - { - if (!aca.broadcastHub.IsControlled(brain)) - { - Debug.Log("Switched brain in scene " + scenePath); - aca.broadcastHub.SetControlled(brain, true); - } - } - EditorSceneManager.SaveScene(curScene); - } - else - { - Debug.Log("scene " + scenePath + " doesn't have a Academy in it"); - } - } - } - } -} - -#endif diff --git a/UnitySDK/Assets/ML-Agents/Editor/BuilderUtils.cs.meta b/UnitySDK/Assets/ML-Agents/Editor/BuilderUtils.cs.meta deleted file mode 100644 index 0525afe73b..0000000000 --- a/UnitySDK/Assets/ML-Agents/Editor/BuilderUtils.cs.meta +++ /dev/null @@ -1,3 +0,0 @@ -fileFormatVersion: 2 -guid: 08deb8536161410982b88716adb02f69 -timeCreated: 1555623012 \ No newline at end of file diff --git a/UnitySDK/Assets/ML-Agents/Editor/DemonstrationDrawer.cs b/UnitySDK/Assets/ML-Agents/Editor/DemonstrationDrawer.cs index ecd50212a4..e3a3e503e8 100644 --- a/UnitySDK/Assets/ML-Agents/Editor/DemonstrationDrawer.cs +++ b/UnitySDK/Assets/ML-Agents/Editor/DemonstrationDrawer.cs @@ -59,33 +59,6 @@ static string BuildActionArrayLabel(SerializedProperty actionSizeProperty) return actionLabel.ToString(); } - /// - /// Constructs complex label for each CameraResolution object. - /// An example of this could be `[ 84 X 84 ]` - /// for a single camera with 84 pixels height and width. 
- /// - private static string BuildCameraResolutionLabel(SerializedProperty cameraArray) - { - var numCameras = cameraArray.arraySize; - var cameraLabel = new StringBuilder("[ "); - for (var i = 0; i < numCameras; i++) - { - var camHeightPropName = - cameraArray.GetArrayElementAtIndex(i).FindPropertyRelative("height"); - cameraLabel.Append(camHeightPropName.intValue); - cameraLabel.Append(" X "); - var camWidthPropName = - cameraArray.GetArrayElementAtIndex(i).FindPropertyRelative("width"); - cameraLabel.Append(camWidthPropName.intValue); - if (i < numCameras - 1) - { - cameraLabel.Append(", "); - } - } - - cameraLabel.Append(" ]"); - return cameraLabel.ToString(); - } /// /// Renders Inspector UI for Brain Parameters of Demonstration. @@ -95,21 +68,18 @@ void MakeBrainParametersProperty(SerializedProperty property) var vecObsSizeProp = property.FindPropertyRelative("vectorObservationSize"); var numStackedProp = property.FindPropertyRelative("numStackedVectorObservations"); var actSizeProperty = property.FindPropertyRelative("vectorActionSize"); - var camResProp = property.FindPropertyRelative("cameraResolutions"); var actSpaceTypeProp = property.FindPropertyRelative("vectorActionSpaceType"); var vecObsSizeLabel = vecObsSizeProp.displayName + ": " + vecObsSizeProp.intValue; var numStackedLabel = numStackedProp.displayName + ": " + numStackedProp.intValue; var vecActSizeLabel = actSizeProperty.displayName + ": " + BuildActionArrayLabel(actSizeProperty); - var camResLabel = camResProp.displayName + ": " + BuildCameraResolutionLabel(camResProp); var actSpaceTypeLabel = actSpaceTypeProp.displayName + ": " + (SpaceType)actSpaceTypeProp.enumValueIndex; EditorGUILayout.LabelField(vecObsSizeLabel); EditorGUILayout.LabelField(numStackedLabel); EditorGUILayout.LabelField(vecActSizeLabel); - EditorGUILayout.LabelField(camResLabel); EditorGUILayout.LabelField(actSpaceTypeLabel); } diff --git a/UnitySDK/Assets/ML-Agents/Editor/DemonstrationImporter.cs 
b/UnitySDK/Assets/ML-Agents/Editor/DemonstrationImporter.cs index 0a8b5b3322..1f1acab322 100644 --- a/UnitySDK/Assets/ML-Agents/Editor/DemonstrationImporter.cs +++ b/UnitySDK/Assets/ML-Agents/Editor/DemonstrationImporter.cs @@ -29,11 +29,11 @@ public override void OnImportAsset(AssetImportContext ctx) Stream reader = File.OpenRead(ctx.assetPath); var metaDataProto = DemonstrationMetaProto.Parser.ParseDelimitedFrom(reader); - var metaData = new DemonstrationMetaData(metaDataProto); + var metaData = metaDataProto.ToDemonstrationMetaData(); reader.Seek(DemonstrationStore.MetaDataBytes + 1, 0); var brainParamsProto = BrainParametersProto.Parser.ParseDelimitedFrom(reader); - var brainParameters = new BrainParameters(brainParamsProto); + var brainParameters = brainParamsProto.ToBrainParameters(); reader.Close(); diff --git a/UnitySDK/Assets/ML-Agents/Editor/HeuristicBrainEditor.cs b/UnitySDK/Assets/ML-Agents/Editor/HeuristicBrainEditor.cs deleted file mode 100644 index 97abcd9c3a..0000000000 --- a/UnitySDK/Assets/ML-Agents/Editor/HeuristicBrainEditor.cs +++ /dev/null @@ -1,56 +0,0 @@ -using UnityEngine; -using UnityEditor; - -namespace MLAgents -{ - /// - /// CustomEditor for the Heuristic Brain class. Defines the default Inspector view for a - /// HeuristicBrain. - /// Shows the BrainParameters of the Brain and expose a tool to deep copy BrainParameters - /// between brains. Provides a drag box for a Decision Monoscript that will be used by - /// the Heuristic Brain. - /// - [CustomEditor(typeof(HeuristicBrain))] - public class HeuristicBrainEditor : BrainEditor - { - public override void OnInspectorGUI() - { - EditorGUILayout.LabelField("Heuristic Brain", EditorStyles.boldLabel); - var brain = (HeuristicBrain)target; - base.OnInspectorGUI(); - - // Expose the Heuristic Brain's Monoscript for decision in a drag and drop box. 
- brain.decisionScript = EditorGUILayout.ObjectField( - "Decision Script", brain.decisionScript, typeof(MonoScript), true) as MonoScript; - - CheckIsDecision(brain); - // Draw an error box if the Decision is not set. - if (brain.decisionScript == null) - { - EditorGUILayout.HelpBox("You need to add a 'Decision' component to this Object", - MessageType.Error); - } - } - - /// - /// Ensures tht the Monoscript for the decision of the HeuristicBrain is either null or - /// an implementation of Decision. If the Monoscript is not an implementation of - /// Decision, it will be set to null. - /// - /// The HeuristicBrain with the decision script attached - private static void CheckIsDecision(HeuristicBrain brain) - { - if (brain.decisionScript != null) - { - var decisionInstance = (CreateInstance(brain.decisionScript.name) as Decision); - if (decisionInstance == null) - { - Debug.LogError( - "Instance of " + brain.decisionScript.name + " couldn't be created. " + - "The script class needs to derive from Decision."); - brain.decisionScript = null; - } - } - } - } -} diff --git a/UnitySDK/Assets/ML-Agents/Editor/HeuristicBrainEditor.cs.meta b/UnitySDK/Assets/ML-Agents/Editor/HeuristicBrainEditor.cs.meta deleted file mode 100644 index 304d51f239..0000000000 --- a/UnitySDK/Assets/ML-Agents/Editor/HeuristicBrainEditor.cs.meta +++ /dev/null @@ -1,3 +0,0 @@ -fileFormatVersion: 2 -guid: c3347a9ad704411896dd4898423c6515 -timeCreated: 1536852553 \ No newline at end of file diff --git a/UnitySDK/Assets/ML-Agents/Editor/LearningBrainEditor.cs b/UnitySDK/Assets/ML-Agents/Editor/LearningBrainEditor.cs deleted file mode 100644 index 5fe421f2c5..0000000000 --- a/UnitySDK/Assets/ML-Agents/Editor/LearningBrainEditor.cs +++ /dev/null @@ -1,84 +0,0 @@ -using UnityEngine; -using UnityEditor; - -namespace MLAgents -{ - /// - /// CustomEditor for the LearningBrain class. Defines the default Inspector view for a - /// LearningBrain. 
- /// Shows the BrainParameters of the Brain and expose a tool to deep copy BrainParameters - /// between brains. Also exposes a drag box for the Model that will be used by the - /// LearningBrain. - /// - [CustomEditor(typeof(LearningBrain))] - public class LearningBrainEditor : BrainEditor - { - private const string k_ModelPropName = "model"; - private const string k_InferenceDevicePropName = "inferenceDevice"; - private const float k_TimeBetweenModelReloads = 2f; - // Time since the last reload of the model - private float m_TimeSinceModelReload; - // Whether or not the model needs to be reloaded - private bool m_RequireReload; - - /// - /// Called when the user opens the Inspector for the LearningBrain - /// - public void OnEnable() - { - m_RequireReload = true; - EditorApplication.update += IncreaseTimeSinceLastModelReload; - } - - /// - /// Called when the user leaves the Inspector for the LearningBrain - /// - public void OnDisable() - { - EditorApplication.update -= IncreaseTimeSinceLastModelReload; - } - - public override void OnInspectorGUI() - { - EditorGUILayout.LabelField("Learning Brain", EditorStyles.boldLabel); - var brain = (LearningBrain)target; - var serializedBrain = serializedObject; - EditorGUI.BeginChangeCheck(); - base.OnInspectorGUI(); - serializedBrain.Update(); - var tfGraphModel = serializedBrain.FindProperty(k_ModelPropName); - EditorGUILayout.ObjectField(tfGraphModel); - var inferenceDevice = serializedBrain.FindProperty(k_InferenceDevicePropName); - EditorGUILayout.PropertyField(inferenceDevice); - serializedBrain.ApplyModifiedProperties(); - if (EditorGUI.EndChangeCheck()) - { - m_RequireReload = true; - } - if (m_RequireReload && m_TimeSinceModelReload > k_TimeBetweenModelReloads) - { - brain.ReloadModel(); - m_RequireReload = false; - m_TimeSinceModelReload = 0; - } - // Display all failed checks - var failedChecks = brain.GetModelFailedChecks(); - foreach (var check in failedChecks) - { - if (check != null) - { - 
EditorGUILayout.HelpBox(check, MessageType.Warning); - } - } - } - - /// - /// Increases the time since last model reload by the deltaTime since the last Update call - /// from the UnityEditor - /// - private void IncreaseTimeSinceLastModelReload() - { - m_TimeSinceModelReload += Time.deltaTime; - } - } -} diff --git a/UnitySDK/Assets/ML-Agents/Editor/LearningBrainEditor.cs.meta b/UnitySDK/Assets/ML-Agents/Editor/LearningBrainEditor.cs.meta deleted file mode 100644 index ce3229c291..0000000000 --- a/UnitySDK/Assets/ML-Agents/Editor/LearningBrainEditor.cs.meta +++ /dev/null @@ -1,3 +0,0 @@ -fileFormatVersion: 2 -guid: b538d92cc78b4a62a596822eca31423e -timeCreated: 1536970736 \ No newline at end of file diff --git a/UnitySDK/Assets/ML-Agents/Editor/PlayerBrainEditor.cs b/UnitySDK/Assets/ML-Agents/Editor/PlayerBrainEditor.cs deleted file mode 100644 index f235bbf8c1..0000000000 --- a/UnitySDK/Assets/ML-Agents/Editor/PlayerBrainEditor.cs +++ /dev/null @@ -1,103 +0,0 @@ -using UnityEngine; - -using UnityEditor; - -namespace MLAgents -{ - /// - /// CustomEditor for the PlayerBrain class. Defines the default Inspector view for a - /// PlayerBrain. - /// Shows the BrainParameters of the Brain and expose a tool to deep copy BrainParameters - /// between brains. Also exposes the key mappings for either continuous or discrete control - /// depending on the Vector Action Space Type of the Brain Parameter. These mappings are the - /// ones that will be used by the PlayerBrain. 
- /// - [CustomEditor(typeof(PlayerBrain))] - public class PlayerBrainEditor : BrainEditor - { - private const string k_KeyContinuousPropName = "keyContinuousPlayerActions"; - private const string k_KeyDiscretePropName = "discretePlayerActions"; - private const string k_AxisContinuousPropName = "axisContinuousPlayerActions"; - - public override void OnInspectorGUI() - { - EditorGUILayout.LabelField("Player Brain", EditorStyles.boldLabel); - var brain = (PlayerBrain)target; - var serializedBrain = serializedObject; - base.OnInspectorGUI(); - - serializedBrain.Update(); - if (brain.brainParameters.vectorActionSpaceType == SpaceType.Continuous) - { - DrawContinuousKeyMapping(serializedBrain, brain); - } - else - { - DrawDiscreteKeyMapping(serializedBrain); - } - serializedBrain.ApplyModifiedProperties(); - } - - /// - /// Draws the UI for continuous control key mapping to actions. - /// - /// The SerializedObject corresponding to the brain. - /// The Brain of which properties are displayed. 
- private static void DrawContinuousKeyMapping( - SerializedObject serializedBrain, PlayerBrain brain) - { - GUILayout.Label("Edit the continuous inputs for your actions", EditorStyles.boldLabel); - var keyActionsProp = serializedBrain.FindProperty(k_KeyContinuousPropName); - var axisActionsProp = serializedBrain.FindProperty(k_AxisContinuousPropName); - EditorGUILayout.PropertyField(keyActionsProp , true); - EditorGUILayout.PropertyField(axisActionsProp, true); - var keyContinuous = brain.keyContinuousPlayerActions; - var axisContinuous = brain.axisContinuousPlayerActions; - var brainParams = brain.brainParameters; - if (keyContinuous == null) - { - keyContinuous = new PlayerBrain.KeyContinuousPlayerAction[0]; - } - if (axisContinuous == null) - { - axisContinuous = new PlayerBrain.AxisContinuousPlayerAction[0]; - } - foreach (var action in keyContinuous) - { - if (action.index >= brainParams.vectorActionSize[0]) - { - EditorGUILayout.HelpBox( - $"Key {action.key.ToString()} is assigned to index " + - $"{action.index.ToString()} but the action size is only of size " + - $"{brainParams.vectorActionSize}", - MessageType.Error); - } - } - foreach (var action in axisContinuous) - { - if (action.index >= brainParams.vectorActionSize[0]) - { - EditorGUILayout.HelpBox( - $"Axis {action.axis} is assigned to index {action.index.ToString()} " + - $"but the action size is only of size {brainParams.vectorActionSize}", - MessageType.Error); - } - } - GUILayout.Label("You can change axis settings from Edit->Project Settings->Input", - EditorStyles.helpBox); - } - - /// - /// Draws the UI for discrete control key mapping to actions. - /// - /// The SerializedObject corresponding to the brain. 
- private static void DrawDiscreteKeyMapping(SerializedObject serializedBrain) - { - GUILayout.Label("Edit the discrete inputs for your actions", - EditorStyles.boldLabel); - var dhas = serializedBrain.FindProperty(k_KeyDiscretePropName); - serializedBrain.Update(); - EditorGUILayout.PropertyField(dhas, true); - } - } -} diff --git a/UnitySDK/Assets/ML-Agents/Editor/PlayerBrainEditor.cs.meta b/UnitySDK/Assets/ML-Agents/Editor/PlayerBrainEditor.cs.meta deleted file mode 100644 index 82acf52b77..0000000000 --- a/UnitySDK/Assets/ML-Agents/Editor/PlayerBrainEditor.cs.meta +++ /dev/null @@ -1,3 +0,0 @@ -fileFormatVersion: 2 -guid: 0d99e43f78e54b4f96a346219e2ca2d2 -timeCreated: 1536851993 \ No newline at end of file diff --git a/UnitySDK/Assets/ML-Agents/Editor/Tests/DemonstrationTests.cs b/UnitySDK/Assets/ML-Agents/Editor/Tests/DemonstrationTests.cs index d72c1262bf..4500bd21ed 100644 --- a/UnitySDK/Assets/ML-Agents/Editor/Tests/DemonstrationTests.cs +++ b/UnitySDK/Assets/ML-Agents/Editor/Tests/DemonstrationTests.cs @@ -33,7 +33,6 @@ public void TestStoreInitalize() { vectorObservationSize = 3, numStackedVectorObservations = 2, - cameraResolutions = new[] {new Resolution()}, vectorActionDescriptions = new[] {"TestActionA", "TestActionB"}, vectorActionSize = new[] {2, 2}, vectorActionSpaceType = SpaceType.Discrete @@ -47,7 +46,6 @@ public void TestStoreInitalize() var agentInfo = new AgentInfo { reward = 1f, - visualObservations = new List(), actionMasks = new[] {false, true}, done = true, id = 5, diff --git a/UnitySDK/Assets/ML-Agents/Editor/Tests/EditModeTestInternalBrainTensorApplier.cs b/UnitySDK/Assets/ML-Agents/Editor/Tests/EditModeTestInternalBrainTensorApplier.cs index 2f67af2ca5..3b3ef2245d 100644 --- a/UnitySDK/Assets/ML-Agents/Editor/Tests/EditModeTestInternalBrainTensorApplier.cs +++ b/UnitySDK/Assets/ML-Agents/Editor/Tests/EditModeTestInternalBrainTensorApplier.cs @@ -1,5 +1,4 @@ using System.Collections.Generic; -using System.Linq; using NUnit.Framework; 
using UnityEngine; using System.Reflection; @@ -20,16 +19,14 @@ public AgentAction GetAction() } } - private Dictionary GetFakeAgentInfos() + private List GetFakeAgentInfos() { var goA = new GameObject("goA"); var agentA = goA.AddComponent(); - var infoA = new AgentInfo(); var goB = new GameObject("goB"); var agentB = goB.AddComponent(); - var infoB = new AgentInfo(); - return new Dictionary(){{agentA, infoA}, {agentB, infoB}}; + return new List {agentA, agentB}; } [Test] @@ -54,7 +51,7 @@ public void ApplyContinuousActionOutput() var applier = new ContinuousActionOutputApplier(); applier.Apply(inputTensor, agentInfos); - var agents = agentInfos.Keys.ToList(); + var agents = agentInfos; var agent = agents[0] as TestAgent; Assert.NotNull(agent); @@ -86,7 +83,7 @@ public void ApplyDiscreteActionOutput() var alloc = new TensorCachingAllocator(); var applier = new DiscreteActionOutputApplier(new[] {2, 3}, 0, alloc); applier.Apply(inputTensor, agentInfos); - var agents = agentInfos.Keys.ToList(); + var agents = agentInfos; var agent = agents[0] as TestAgent; Assert.NotNull(agent); @@ -117,7 +114,7 @@ public void ApplyMemoryOutput() var applier = new MemoryOutputApplier(); applier.Apply(inputTensor, agentInfos); - var agents = agentInfos.Keys.ToList(); + var agents = agentInfos; var agent = agents[0] as TestAgent; Assert.NotNull(agent); @@ -144,7 +141,7 @@ public void ApplyValueEstimate() var applier = new ValueEstimateApplier(); applier.Apply(inputTensor, agentInfos); - var agents = agentInfos.Keys.ToList(); + var agents = agentInfos; var agent = agents[0] as TestAgent; Assert.NotNull(agent); diff --git a/UnitySDK/Assets/ML-Agents/Editor/Tests/EditModeTestInternalBrainTensorGenerator.cs b/UnitySDK/Assets/ML-Agents/Editor/Tests/EditModeTestInternalBrainTensorGenerator.cs index ce62ac7259..0d64b92214 100644 --- a/UnitySDK/Assets/ML-Agents/Editor/Tests/EditModeTestInternalBrainTensorGenerator.cs +++ 
b/UnitySDK/Assets/ML-Agents/Editor/Tests/EditModeTestInternalBrainTensorGenerator.cs @@ -9,32 +9,30 @@ namespace MLAgents.Tests { public class EditModeTestInternalBrainTensorGenerator { - private class TestAgent : Agent - { - } - - private Dictionary GetFakeAgentInfos() + private static IEnumerable GetFakeAgentInfos() { var goA = new GameObject("goA"); var agentA = goA.AddComponent(); - var infoA = new AgentInfo() + var infoA = new AgentInfo { - stackedVectorObservation = (new[] {1f, 2f, 3f}).ToList(), + stackedVectorObservation = new[] { 1f, 2f, 3f }.ToList(), memories = null, - storedVectorActions = new[] {1f, 2f}, - actionMasks = null, + storedVectorActions = new[] { 1f, 2f }, + actionMasks = null }; var goB = new GameObject("goB"); var agentB = goB.AddComponent(); - var infoB = new AgentInfo() + var infoB = new AgentInfo { - stackedVectorObservation = (new[] {4f, 5f, 6f}).ToList(), - memories = (new[] {1f, 1f, 1f}).ToList(), - storedVectorActions = new[] {3f, 4f}, - actionMasks = new[] {true, false, false, false, false}, + stackedVectorObservation = new[] { 4f, 5f, 6f }.ToList(), + memories = new[] { 1f, 1f, 1f }.ToList(), + storedVectorActions = new[] { 3f, 4f }, + actionMasks = new[] { true, false, false, false, false }, }; + agentA.Info = infoA; + agentB.Info = infoB; - return new Dictionary(){{agentA, infoA}, {agentB, infoB}}; + return new List { agentA, agentB }; } [Test] @@ -76,9 +74,9 @@ public void GenerateSequenceLength() [Test] public void GenerateVectorObservation() { - var inputTensor = new TensorProxy() + var inputTensor = new TensorProxy { - shape = new long[] {2, 3} + shape = new long[] { 2, 3 } }; const int batchSize = 4; var agentInfos = GetFakeAgentInfos(); @@ -96,9 +94,9 @@ public void GenerateVectorObservation() [Test] public void GenerateRecurrentInput() { - var inputTensor = new TensorProxy() + var inputTensor = new TensorProxy { - shape = new long[] {2, 5} + shape = new long[] { 2, 5 } }; const int batchSize = 4; var agentInfos = 
GetFakeAgentInfos(); @@ -116,9 +114,9 @@ public void GenerateRecurrentInput() [Test] public void GeneratePreviousActionInput() { - var inputTensor = new TensorProxy() + var inputTensor = new TensorProxy { - shape = new long[] {2, 2}, + shape = new long[] { 2, 2 }, valueType = TensorProxy.TensorType.Integer }; const int batchSize = 4; @@ -138,9 +136,9 @@ public void GeneratePreviousActionInput() [Test] public void GenerateActionMaskInput() { - var inputTensor = new TensorProxy() + var inputTensor = new TensorProxy { - shape = new long[] {2, 5}, + shape = new long[] { 2, 5 }, valueType = TensorProxy.TensorType.FloatingPoint }; const int batchSize = 4; diff --git a/UnitySDK/Assets/ML-Agents/Editor/Tests/MLAgentsEditModeTest.cs b/UnitySDK/Assets/ML-Agents/Editor/Tests/MLAgentsEditModeTest.cs index 7047e72352..a5602f2a03 100644 --- a/UnitySDK/Assets/ML-Agents/Editor/Tests/MLAgentsEditModeTest.cs +++ b/UnitySDK/Assets/ML-Agents/Editor/Tests/MLAgentsEditModeTest.cs @@ -1,6 +1,8 @@ using UnityEngine; using NUnit.Framework; using System.Reflection; +using MLAgents.Sensor; +using MLAgents.InferenceBrain; namespace MLAgents.Tests { @@ -16,7 +18,6 @@ public override void InitializeAcademy() public override void AcademyReset() { - } public override void AcademyStep() @@ -34,11 +35,19 @@ public class TestAgent : Agent public override void InitializeAgent() { initializeAgentCalls += 1; + + // Add in some custom sensors so we can confirm they get sorted as expected. 
+ var sensor1 = new TestSensor("testsensor1"); + var sensor2 = new TestSensor("testsensor2"); + + m_Sensors.Add(sensor2); + m_Sensors.Add(sensor1); } public override void CollectObservations() { collectObservationsCalls += 1; + AddVectorObs(0f); } public override void AgentAction(float[] vectorAction, string textAction) @@ -56,31 +65,44 @@ public override void AgentOnDone() { agentOnDoneCalls += 1; } + + public override float[] Heuristic() + { + return new float[0]; + } } - // This is an empty class for testing the behavior of agents and academy - // It is left empty because we are not testing any brain behavior - public class TestBrain : Brain + public class TestSensor : ISensor { - public int numberOfCallsToInitialize; - public int numberOfCallsToDecideAction; - public static TestBrain Instantiate() + public string sensorName; + + public TestSensor(string n) { - return CreateInstance(); + sensorName = n; } - protected override void Initialize() + public int[] GetFloatObservationShape() { - numberOfCallsToInitialize++; + return new[] { 1 }; } - protected override void DecideAction() + public void WriteToTensor(TensorProxy tensorProxy, int agentIndex) { } + + public byte[] GetCompressedObservation() { - numberOfCallsToDecideAction++; - m_AgentInfos.Clear(); + return null; } - } + public SensorCompressionType GetCompressionType() + { + return SensorCompressionType.None; + } + + public string GetName() + { + return sensorName; + } + } public class EditModeTestGeneration { @@ -116,6 +138,7 @@ public void TestAcademy() var acaGo = new GameObject("TestAcademy"); acaGo.AddComponent(); var aca = acaGo.GetComponent(); + aca.resetParameters = new ResetParameters(); Assert.AreEqual(0, aca.initializeAcademyCalls); Assert.AreEqual(0, aca.GetStepCount()); Assert.AreEqual(0, aca.GetEpisodeCount()); @@ -141,11 +164,7 @@ public void TestAgent() var acaGo = new GameObject("TestAcademy"); acaGo.AddComponent(); var aca = acaGo.GetComponent(); - var brain = TestBrain.Instantiate(); - 
brain.brainParameters = new BrainParameters(); - brain.brainParameters.vectorObservationSize = 0; - agent1.GiveBrain(brain); - agent2.GiveBrain(brain); + aca.resetParameters = new ResetParameters(); Assert.AreEqual(false, agent1.IsDone()); Assert.AreEqual(false, agent2.IsDone()); @@ -176,6 +195,10 @@ public void TestAgent() Assert.AreEqual(1, agent2.initializeAgentCalls); Assert.AreEqual(0, agent1.agentActionCalls); Assert.AreEqual(0, agent2.agentActionCalls); + + // Make sure the sensors were sorted + Assert.AreEqual(agent1.m_Sensors[0].GetName(), "testsensor1"); + Assert.AreEqual(agent1.m_Sensors[1].GetName(), "testsensor2"); } } @@ -187,6 +210,7 @@ public void TestAcademy() var acaGo = new GameObject("TestAcademy"); acaGo.AddComponent(); var aca = acaGo.GetComponent(); + aca.resetParameters = new ResetParameters(); var academyInitializeMethod = typeof(Academy).GetMethod("InitializeEnvironment", BindingFlags.Instance | BindingFlags.NonPublic); academyInitializeMethod?.Invoke(aca, new object[] { }); @@ -223,7 +247,7 @@ public void TestAgent() var acaGo = new GameObject("TestAcademy"); acaGo.AddComponent(); var aca = acaGo.GetComponent(); - var brain = TestBrain.Instantiate(); + aca.resetParameters = new ResetParameters(); var agentEnableMethod = typeof(Agent).GetMethod( @@ -233,17 +257,12 @@ public void TestAgent() agent1.agentParameters = new AgentParameters(); agent2.agentParameters = new AgentParameters(); - brain.brainParameters = new BrainParameters(); // We use event based so the agent will now try to send anything to the brain agent1.agentParameters.onDemandDecision = false; agent1.agentParameters.numberOfActionsBetweenDecisions = 2; // agent1 will take an action at every step and request a decision every 2 steps agent2.agentParameters.onDemandDecision = true; // agent2 will request decisions only when RequestDecision is called - brain.brainParameters.vectorObservationSize = 0; - brain.brainParameters.cameraResolutions = new Resolution[0]; - 
agent1.GiveBrain(brain); - agent2.GiveBrain(brain); agentEnableMethod?.Invoke(agent1, new object[] { aca }); academyInitializeMethod?.Invoke(aca, new object[] { }); @@ -306,6 +325,7 @@ public void TestAcademy() var acaGo = new GameObject("TestAcademy"); acaGo.AddComponent(); var aca = acaGo.GetComponent(); + aca.resetParameters = new ResetParameters(); var academyInitializeMethod = typeof(Academy).GetMethod( "InitializeEnvironment", BindingFlags.Instance | BindingFlags.NonPublic); academyInitializeMethod?.Invoke(aca, new object[] { }); @@ -328,7 +348,7 @@ public void TestAcademy() } stepsSinceReset += 1; - academyStepMethod.Invoke((object)aca, new object[] { }); + academyStepMethod.Invoke(aca, new object[] { }); } } @@ -344,7 +364,7 @@ public void TestAgent() var acaGo = new GameObject("TestAcademy"); acaGo.AddComponent(); var aca = acaGo.GetComponent(); - var brain = TestBrain.Instantiate(); + aca.resetParameters = new ResetParameters(); var agentEnableMethod = typeof(Agent).GetMethod( @@ -357,17 +377,11 @@ public void TestAgent() agent1.agentParameters = new AgentParameters(); agent2.agentParameters = new AgentParameters(); - brain.brainParameters = new BrainParameters(); // We use event based so the agent will now try to send anything to the brain agent1.agentParameters.onDemandDecision = false; agent1.agentParameters.numberOfActionsBetweenDecisions = 2; // agent1 will take an action at every step and request a decision every 2 steps agent2.agentParameters.onDemandDecision = true; - // agent2 will request decisions only when RequestDecision is called - brain.brainParameters.vectorObservationSize = 0; - brain.brainParameters.cameraResolutions = new Resolution[0]; - agent1.GiveBrain(brain); - agent2.GiveBrain(brain); agentEnableMethod?.Invoke(agent2, new object[] { aca }); academyInitializeMethod?.Invoke(aca, new object[] { }); @@ -456,7 +470,7 @@ public void TestResetOnDone() var acaGo = new GameObject("TestAcademy"); acaGo.AddComponent(); var aca = 
acaGo.GetComponent(); - var brain = TestBrain.Instantiate(); + aca.resetParameters = new ResetParameters(); var agentEnableMethod = typeof(Agent).GetMethod( @@ -469,7 +483,6 @@ public void TestResetOnDone() agent1.agentParameters = new AgentParameters(); agent2.agentParameters = new AgentParameters(); - brain.brainParameters = new BrainParameters(); // We use event based so the agent will now try to send anything to the brain agent1.agentParameters.onDemandDecision = false; // agent1 will take an action at every step and request a decision every steps @@ -480,10 +493,6 @@ public void TestResetOnDone() //Here we specify that the agent does not reset when done agent1.agentParameters.resetOnDone = false; agent2.agentParameters.resetOnDone = false; - brain.brainParameters.vectorObservationSize = 0; - brain.brainParameters.cameraResolutions = new Resolution[0]; - agent1.GiveBrain(brain); - agent2.GiveBrain(brain); agentEnableMethod?.Invoke(agent2, new object[] { aca }); academyInitializeMethod?.Invoke(aca, new object[] { }); @@ -538,7 +547,7 @@ public void TestCumulativeReward() var acaGo = new GameObject("TestAcademy"); acaGo.AddComponent(); var aca = acaGo.GetComponent(); - var brain = TestBrain.Instantiate(); + aca.resetParameters = new ResetParameters(); var agentEnableMethod = typeof(Agent).GetMethod( @@ -551,7 +560,6 @@ public void TestCumulativeReward() agent1.agentParameters = new AgentParameters(); agent2.agentParameters = new AgentParameters(); - brain.brainParameters = new BrainParameters(); // We use event based so the agent will now try to send anything to the brain agent1.agentParameters.onDemandDecision = false; agent1.agentParameters.numberOfActionsBetweenDecisions = 3; @@ -559,10 +567,6 @@ public void TestCumulativeReward() agent2.agentParameters.onDemandDecision = true; // agent2 will request decisions only when RequestDecision is called agent1.agentParameters.maxStep = 20; - brain.brainParameters.vectorObservationSize = 0; - 
brain.brainParameters.cameraResolutions = new Resolution[0]; - agent1.GiveBrain(brain); - agent2.GiveBrain(brain); agentEnableMethod?.Invoke(agent2, new object[] { aca }); academyInitializeMethod?.Invoke(aca, new object[] { }); diff --git a/UnitySDK/Assets/ML-Agents/Editor/Tests/StandaloneBuildTest.cs b/UnitySDK/Assets/ML-Agents/Editor/Tests/StandaloneBuildTest.cs new file mode 100644 index 0000000000..d88aacbd43 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Editor/Tests/StandaloneBuildTest.cs @@ -0,0 +1,48 @@ +using System; +using UnityEditor; +using UnityEngine; +#if UNITY_2018_1_OR_NEWER +using UnityEditor.Build.Reporting; +#endif + +namespace MLAgents +{ + public class StandaloneBuildTest + { + static void BuildStandalonePlayerOSX() + { + string[] scenes = { "Assets/ML-Agents/Examples/3DBall/Scenes/3DBall.unity" }; + var buildResult = BuildPipeline.BuildPlayer(scenes, "testPlayer", BuildTarget.StandaloneOSX, BuildOptions.None); +#if UNITY_2018_1_OR_NEWER + var isOK = buildResult.summary.result == BuildResult.Succeeded; + var error = ""; + foreach (var stepInfo in buildResult.steps) + { + foreach (var msg in stepInfo.messages) + { + if (msg.type != LogType.Log && msg.type != LogType.Warning) + { + error += msg.content + "\n"; + } + } + } +#else + var error = buildResult; + var isOK = string.IsNullOrEmpty(error); +#endif + if (isOK) + { + EditorApplication.Exit(0); + } + else + { + Console.Error.WriteLine(error); + EditorApplication.Exit(1); + + } + Debug.Log(error); + + } + + } +} diff --git a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/AgentInfoProto.cs.meta b/UnitySDK/Assets/ML-Agents/Editor/Tests/StandaloneBuildTest.cs.meta similarity index 83% rename from UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/AgentInfoProto.cs.meta rename to UnitySDK/Assets/ML-Agents/Editor/Tests/StandaloneBuildTest.cs.meta index a663228813..7148a78558 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/AgentInfoProto.cs.meta +++ 
b/UnitySDK/Assets/ML-Agents/Editor/Tests/StandaloneBuildTest.cs.meta @@ -1,5 +1,5 @@ fileFormatVersion: 2 -guid: 791522439b8324bff85f84309db90ecc +guid: a0c87e095448c49e9812863bb5d1f4e1 MonoImporter: externalObjects: {} serializedVersion: 2 diff --git a/UnitySDK/Assets/ML-Agents/Editor/Tests/TimerTest.cs b/UnitySDK/Assets/ML-Agents/Editor/Tests/TimerTest.cs new file mode 100644 index 0000000000..befb825e97 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Editor/Tests/TimerTest.cs @@ -0,0 +1,36 @@ +using NUnit.Framework; +using UnityEngine; + +namespace MLAgents.Tests +{ + public class TimerTests + { + [Test] + public void TestNested() + { + TimerStack myTimer = TimerStack.Instance; + myTimer.Reset(); + + using (myTimer.Scoped("foo")) + { + for (int i = 0; i < 5; i++) + { + using (myTimer.Scoped("bar")) + { + } + } + } + + var rootChildren = myTimer.RootNode.Children; + Assert.That(rootChildren, Contains.Key("foo")); + Assert.AreEqual(rootChildren["foo"].NumCalls, 1); + + var fooChildren = rootChildren["foo"].Children; + Assert.That(fooChildren, Contains.Key("bar")); + Assert.AreEqual(fooChildren["bar"].NumCalls, 5); + + myTimer.Reset(); + Assert.AreEqual(myTimer.RootNode.Children, null); + } + } +} diff --git a/UnitySDK/Assets/ML-Agents/Editor/Tests/TimerTest.cs.meta b/UnitySDK/Assets/ML-Agents/Editor/Tests/TimerTest.cs.meta new file mode 100644 index 0000000000..9824d01d30 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Editor/Tests/TimerTest.cs.meta @@ -0,0 +1,3 @@ +fileFormatVersion: 2 +guid: 506de2f6a1c74967a6f16ebf494c01d5 +timeCreated: 1569370981 \ No newline at end of file diff --git a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Brains/3DBallHardLearning.asset b/UnitySDK/Assets/ML-Agents/Examples/3DBall/Brains/3DBallHardLearning.asset deleted file mode 100644 index ecd02329d6..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Brains/3DBallHardLearning.asset +++ /dev/null @@ -1,24 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 8b23992c8eb17439887f5e944bf04a40, type: 3} - m_Name: 3DBallHardLearning - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 5 - numStackedVectorObservations: 9 - vectorActionSize: 02000000 - cameraResolutions: [] - vectorActionDescriptions: - - - - - vectorActionSpaceType: 1 - model: {fileID: 11400000, guid: cee7d20369b814d549573de7e76c4a81, type: 3} - inferenceDevice: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Brains/3DBallHardLearning.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/3DBall/Brains/3DBallHardLearning.asset.meta deleted file mode 100644 index 0459776bbb..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Brains/3DBallHardLearning.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 4f74e089fbb75455ebf6f0495e30be6e -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Brains/3DBallHardPlayer.asset b/UnitySDK/Assets/ML-Agents/Examples/3DBall/Brains/3DBallHardPlayer.asset deleted file mode 100644 index a091a17eae..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Brains/3DBallHardPlayer.asset +++ /dev/null @@ -1,31 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 41e9bda8f3cf1492fa74926a530f6f70, type: 3} - m_Name: 3DBallHardPlayer - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 5 - numStackedVectorObservations: 9 - vectorActionSize: 02000000 - cameraResolutions: [] - vectorActionDescriptions: - - - - - vectorActionSpaceType: 1 - keyContinuousPlayerActions: [] - axisContinuousPlayerActions: - - axis: Horizontal - index: 0 - scale: -1 - - axis: Vertical - index: 1 - scale: 1 - discretePlayerActions: [] diff --git a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Brains/3DBallHardPlayer.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/3DBall/Brains/3DBallHardPlayer.asset.meta deleted file mode 100644 index 0094fa2df2..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Brains/3DBallHardPlayer.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 55f48be32ac184c6ab67cf647100bac4 -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Brains/3DBallHeuristic.asset b/UnitySDK/Assets/ML-Agents/Examples/3DBall/Brains/3DBallHeuristic.asset deleted file mode 100644 index d3675f8df2..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Brains/3DBallHeuristic.asset +++ /dev/null @@ -1,26 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 943466ab374444748a364f9d6c3e2fe2, type: 3} - m_Name: 3DBallHeuristic - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 8 - numStackedVectorObservations: 1 - vectorActionSize: 02000000 - cameraResolutions: [] - vectorActionDescriptions: - - - - - vectorActionSpaceType: 1 - isExternal: 0 - decision: {fileID: 0} - decisionScript: {fileID: 11500000, guid: 67264e06e07fb40d8939b0860ebee773, type: 3} - c_decision: RandomDecision diff --git a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Brains/3DBallHeuristic.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/3DBall/Brains/3DBallHeuristic.asset.meta deleted file mode 100644 index b50b1826c5..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Brains/3DBallHeuristic.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 3a0cd58e7f0764eea8952cb416295fd0 -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Brains/3DBallLearning.asset b/UnitySDK/Assets/ML-Agents/Examples/3DBall/Brains/3DBallLearning.asset deleted file mode 100644 index 7931f6f3c6..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Brains/3DBallLearning.asset +++ /dev/null @@ -1,24 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 8b23992c8eb17439887f5e944bf04a40, type: 3} - m_Name: 3DBallLearning - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 8 - numStackedVectorObservations: 1 - vectorActionSize: 02000000 - cameraResolutions: [] - vectorActionDescriptions: - - - - - vectorActionSpaceType: 1 - model: {fileID: 11400000, guid: a0e8d1fda5a6f41be955d2b30479c2a1, type: 3} - inferenceDevice: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Brains/3DBallLearning.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/3DBall/Brains/3DBallLearning.asset.meta deleted file mode 100644 index 8d1b153f9e..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Brains/3DBallLearning.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 383c589e8bb76464eadc2525b5b0f2c1 -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Brains/3DBallPlayer.asset b/UnitySDK/Assets/ML-Agents/Examples/3DBall/Brains/3DBallPlayer.asset deleted file mode 100644 index d17da26e43..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Brains/3DBallPlayer.asset +++ /dev/null @@ -1,44 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 41e9bda8f3cf1492fa74926a530f6f70, type: 3} - m_Name: 3DBallPlayer - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 8 - numStackedVectorObservations: 1 - vectorActionSize: 02000000 - cameraResolutions: [] - vectorActionDescriptions: - - - - - vectorActionSpaceType: 1 - isExternal: 0 - keyContinuousPlayerActions: [] - axisContinuousPlayerActions: - - axis: Horizontal - index: 0 - scale: 1 - - axis: Vertical - index: 1 - scale: 1 - discretePlayerActions: - - key: 12 - branchIndex: 4 - value: 0 - - key: 0 - branchIndex: 4 - value: 0 - - key: 0 - branchIndex: 0 - value: 0 - - key: 0 - branchIndex: 0 - value: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Brains/3DBallPlayer.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/3DBall/Brains/3DBallPlayer.asset.meta deleted file mode 100644 index c49320d90e..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Brains/3DBallPlayer.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 97d8f9d40dc8c452f932f7caa9549c7d -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Prefabs/3DBall.prefab b/UnitySDK/Assets/ML-Agents/Examples/3DBall/Prefabs/3DBall.prefab index 0b47aa0369..80d7a2ceae 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Prefabs/3DBall.prefab +++ b/UnitySDK/Assets/ML-Agents/Examples/3DBall/Prefabs/3DBall.prefab @@ -71,7 +71,8 @@ GameObject: m_Component: - component: {fileID: 4780098186595842} - component: {fileID: 65010516625723872} - - component: {fileID: 114259948429386406} + - component: {fileID: 114368073295828880} + - component: {fileID: 114715123104194396} m_Layer: 0 m_Name: 
Agent m_TagString: Untagged @@ -597,7 +598,28 @@ BoxCollider: serializedVersion: 2 m_Size: {x: 1, y: 1, z: 1} m_Center: {x: 0, y: 0, z: 0} ---- !u!114 &114259948429386406 +--- !u!114 &114368073295828880 +MonoBehaviour: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1424713891854676} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} + m_Name: + m_EditorClassIdentifier: + m_BrainParameters: + vectorObservationSize: 8 + numStackedVectorObservations: 1 + vectorActionSize: 02000000 + vectorActionDescriptions: [] + vectorActionSpaceType: 1 + m_Model: {fileID: 11400000, guid: 20a7b83be6b0c493d9271c65c897eb9b, type: 3} + m_InferenceDevice: 0 + m_UseHeuristic: 0 + m_BehaviorName: 3DBall +--- !u!114 &114715123104194396 MonoBehaviour: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} @@ -608,10 +630,7 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: aaba48bf82bee4751aa7b89569e57f73, type: 3} m_Name: m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: 383c589e8bb76464eadc2525b5b0f2c1, type: 2} agentParameters: - agentCameras: [] - agentRenderTextures: [] maxStep: 5000 resetOnDone: 1 onDemandDecision: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Prefabs/3DBallHardNew.prefab b/UnitySDK/Assets/ML-Agents/Examples/3DBall/Prefabs/3DBallHardNew.prefab index 14a21a5fdc..8917c1bdbe 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Prefabs/3DBallHardNew.prefab +++ b/UnitySDK/Assets/ML-Agents/Examples/3DBall/Prefabs/3DBallHardNew.prefab @@ -138,6 +138,7 @@ GameObject: m_Component: - component: {fileID: 4895942152145390} - component: {fileID: 65170961617201804} + - component: {fileID: 114284317994838100} - component: {fileID: 114466000339026140} m_Layer: 0 m_Name: Agent @@ -597,6 +598,27 @@ BoxCollider: serializedVersion: 2 m_Size: {x: 1, y: 1, z: 1} m_Center: {x: 0, y: 0, z: 0} +--- !u!114 &114284317994838100 
+MonoBehaviour: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1829721031899636} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} + m_Name: + m_EditorClassIdentifier: + m_BrainParameters: + vectorObservationSize: 5 + numStackedVectorObservations: 9 + vectorActionSize: 02000000 + vectorActionDescriptions: [] + vectorActionSpaceType: 1 + m_Model: {fileID: 11400000, guid: 27d49984757ed46b181090a532ef48e5, type: 3} + m_InferenceDevice: 0 + m_UseHeuristic: 0 + m_BehaviorName: 3DBallHard --- !u!114 &114466000339026140 MonoBehaviour: m_ObjectHideFlags: 1 @@ -608,10 +630,7 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: edf26e11cf4ed42eaa3ffb7b91bb4676, type: 3} m_Name: m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: 4f74e089fbb75455ebf6f0495e30be6e, type: 2} agentParameters: - agentCameras: [] - agentRenderTextures: [] maxStep: 5000 resetOnDone: 1 onDemandDecision: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Scenes/3DBall.unity b/UnitySDK/Assets/ML-Agents/Examples/3DBall/Scenes/3DBall.unity index 2a5e882a1f..a8ace0d1dd 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Scenes/3DBall.unity +++ b/UnitySDK/Assets/ML-Agents/Examples/3DBall/Scenes/3DBall.unity @@ -38,7 +38,7 @@ RenderSettings: m_ReflectionIntensity: 1 m_CustomReflection: {fileID: 0} m_Sun: {fileID: 0} - m_IndirectSpecularColor: {r: 0.4497121, g: 0.4997778, b: 0.5756369, a: 1} + m_IndirectSpecularColor: {r: 0.44971162, g: 0.49977726, b: 0.5756362, a: 1} --- !u!157 &3 LightmapSettings: m_ObjectHideFlags: 0 @@ -156,6 +156,10 @@ Prefab: propertyPath: m_Name value: 3DBall (7) objectReference: {fileID: 0} + - target: {fileID: 1321468028730240, guid: cfa81c019162c4e3caf6e2999c6fdf48, type: 2} + propertyPath: m_IsActive + value: 1 + objectReference: {fileID: 0} m_RemovedComponents: [] m_ParentPrefab: {fileID: 100100000, guid: 
cfa81c019162c4e3caf6e2999c6fdf48, type: 2} m_IsPrefabParent: 0 @@ -306,6 +310,10 @@ Prefab: propertyPath: m_Name value: 3DBall (5) objectReference: {fileID: 0} + - target: {fileID: 1321468028730240, guid: cfa81c019162c4e3caf6e2999c6fdf48, type: 2} + propertyPath: m_IsActive + value: 1 + objectReference: {fileID: 0} m_RemovedComponents: [] m_ParentPrefab: {fileID: 100100000, guid: cfa81c019162c4e3caf6e2999c6fdf48, type: 2} m_IsPrefabParent: 0 @@ -352,6 +360,10 @@ Prefab: propertyPath: m_Name value: 3DBall (6) objectReference: {fileID: 0} + - target: {fileID: 1321468028730240, guid: cfa81c019162c4e3caf6e2999c6fdf48, type: 2} + propertyPath: m_IsActive + value: 1 + objectReference: {fileID: 0} m_RemovedComponents: [] m_ParentPrefab: {fileID: 100100000, guid: cfa81c019162c4e3caf6e2999c6fdf48, type: 2} m_IsPrefabParent: 0 @@ -398,6 +410,10 @@ Prefab: propertyPath: m_Name value: 3DBall (3) objectReference: {fileID: 0} + - target: {fileID: 1321468028730240, guid: cfa81c019162c4e3caf6e2999c6fdf48, type: 2} + propertyPath: m_IsActive + value: 1 + objectReference: {fileID: 0} m_RemovedComponents: [] m_ParentPrefab: {fileID: 100100000, guid: cfa81c019162c4e3caf6e2999c6fdf48, type: 2} m_IsPrefabParent: 0 @@ -575,6 +591,10 @@ Prefab: propertyPath: m_Name value: 3DBall (8) objectReference: {fileID: 0} + - target: {fileID: 1321468028730240, guid: cfa81c019162c4e3caf6e2999c6fdf48, type: 2} + propertyPath: m_IsActive + value: 1 + objectReference: {fileID: 0} m_RemovedComponents: [] m_ParentPrefab: {fileID: 100100000, guid: cfa81c019162c4e3caf6e2999c6fdf48, type: 2} m_IsPrefabParent: 0 @@ -621,6 +641,10 @@ Prefab: propertyPath: m_Name value: 3DBall (9) objectReference: {fileID: 0} + - target: {fileID: 1321468028730240, guid: cfa81c019162c4e3caf6e2999c6fdf48, type: 2} + propertyPath: m_IsActive + value: 1 + objectReference: {fileID: 0} m_RemovedComponents: [] m_ParentPrefab: {fileID: 100100000, guid: cfa81c019162c4e3caf6e2999c6fdf48, type: 2} m_IsPrefabParent: 0 @@ -667,6 +691,10 @@ 
Prefab: propertyPath: m_Name value: 3DBall (10) objectReference: {fileID: 0} + - target: {fileID: 1321468028730240, guid: cfa81c019162c4e3caf6e2999c6fdf48, type: 2} + propertyPath: m_IsActive + value: 1 + objectReference: {fileID: 0} m_RemovedComponents: [] m_ParentPrefab: {fileID: 100100000, guid: cfa81c019162c4e3caf6e2999c6fdf48, type: 2} m_IsPrefabParent: 0 @@ -713,6 +741,10 @@ Prefab: propertyPath: m_Name value: 3DBall (1) objectReference: {fileID: 0} + - target: {fileID: 0} + propertyPath: m_BrainParameters.numStackedVectorObservations + value: 1 + objectReference: {fileID: 0} m_RemovedComponents: [] m_ParentPrefab: {fileID: 100100000, guid: cfa81c019162c4e3caf6e2999c6fdf48, type: 2} m_IsPrefabParent: 0 @@ -759,6 +791,10 @@ Prefab: propertyPath: m_Name value: 3DBall (11) objectReference: {fileID: 0} + - target: {fileID: 1321468028730240, guid: cfa81c019162c4e3caf6e2999c6fdf48, type: 2} + propertyPath: m_IsActive + value: 1 + objectReference: {fileID: 0} m_RemovedComponents: [] m_ParentPrefab: {fileID: 100100000, guid: cfa81c019162c4e3caf6e2999c6fdf48, type: 2} m_IsPrefabParent: 0 @@ -805,6 +841,10 @@ Prefab: propertyPath: m_Name value: 3DBall (4) objectReference: {fileID: 0} + - target: {fileID: 1321468028730240, guid: cfa81c019162c4e3caf6e2999c6fdf48, type: 2} + propertyPath: m_IsActive + value: 1 + objectReference: {fileID: 0} m_RemovedComponents: [] m_ParentPrefab: {fileID: 100100000, guid: cfa81c019162c4e3caf6e2999c6fdf48, type: 2} m_IsPrefabParent: 0 @@ -851,11 +891,6 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: eb15e3c3d55e54abaafb74c635b6a458, type: 3} m_Name: m_EditorClassIdentifier: - broadcastHub: - broadcastingBrains: - - {fileID: 11400000, guid: 383c589e8bb76464eadc2525b5b0f2c1, type: 2} - m_BrainsToControl: [] - m_MaxSteps: 0 m_TrainingConfiguration: width: 300 height: 200 @@ -981,6 +1016,10 @@ Prefab: propertyPath: m_Name value: 3DBall (2) objectReference: {fileID: 0} + - target: {fileID: 1321468028730240, guid: 
cfa81c019162c4e3caf6e2999c6fdf48, type: 2} + propertyPath: m_IsActive + value: 1 + objectReference: {fileID: 0} m_RemovedComponents: [] m_ParentPrefab: {fileID: 100100000, guid: cfa81c019162c4e3caf6e2999c6fdf48, type: 2} m_IsPrefabParent: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Scenes/3DBallHard.unity b/UnitySDK/Assets/ML-Agents/Examples/3DBall/Scenes/3DBallHard.unity index 75cbd4eb06..b82ddb3e03 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Scenes/3DBallHard.unity +++ b/UnitySDK/Assets/ML-Agents/Examples/3DBall/Scenes/3DBallHard.unity @@ -38,7 +38,7 @@ RenderSettings: m_ReflectionIntensity: 1 m_CustomReflection: {fileID: 0} m_Sun: {fileID: 0} - m_IndirectSpecularColor: {r: 0.4497121, g: 0.4997778, b: 0.5756369, a: 1} + m_IndirectSpecularColor: {r: 0.44971162, g: 0.49977726, b: 0.5756362, a: 1} --- !u!157 &3 LightmapSettings: m_ObjectHideFlags: 0 @@ -672,10 +672,8 @@ MonoBehaviour: m_Name: m_EditorClassIdentifier: broadcastHub: - broadcastingBrains: + brainsToControl: - {fileID: 11400000, guid: 4f74e089fbb75455ebf6f0495e30be6e, type: 2} - m_BrainsToControl: [] - m_MaxSteps: 0 m_TrainingConfiguration: width: 300 height: 200 diff --git a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Scripts/Ball3DAgent.cs b/UnitySDK/Assets/ML-Agents/Examples/3DBall/Scripts/Ball3DAgent.cs index cdaa96beb2..2ab36cdbd9 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Scripts/Ball3DAgent.cs +++ b/UnitySDK/Assets/ML-Agents/Examples/3DBall/Scripts/Ball3DAgent.cs @@ -26,22 +26,19 @@ public override void CollectObservations() public override void AgentAction(float[] vectorAction, string textAction) { - if (brain.brainParameters.vectorActionSpaceType == SpaceType.Continuous) - { - var actionZ = 2f * Mathf.Clamp(vectorAction[0], -1f, 1f); - var actionX = 2f * Mathf.Clamp(vectorAction[1], -1f, 1f); + var actionZ = 2f * Mathf.Clamp(vectorAction[0], -1f, 1f); + var actionX = 2f * Mathf.Clamp(vectorAction[1], -1f, 1f); - if ((gameObject.transform.rotation.z < 0.25f && 
actionZ > 0f) || - (gameObject.transform.rotation.z > -0.25f && actionZ < 0f)) - { - gameObject.transform.Rotate(new Vector3(0, 0, 1), actionZ); - } + if ((gameObject.transform.rotation.z < 0.25f && actionZ > 0f) || + (gameObject.transform.rotation.z > -0.25f && actionZ < 0f)) + { + gameObject.transform.Rotate(new Vector3(0, 0, 1), actionZ); + } - if ((gameObject.transform.rotation.x < 0.25f && actionX > 0f) || - (gameObject.transform.rotation.x > -0.25f && actionX < 0f)) - { - gameObject.transform.Rotate(new Vector3(1, 0, 0), actionX); - } + if ((gameObject.transform.rotation.x < 0.25f && actionX > 0f) || + (gameObject.transform.rotation.x > -0.25f && actionX < 0f)) + { + gameObject.transform.Rotate(new Vector3(1, 0, 0), actionX); } if ((ball.transform.position.y - gameObject.transform.position.y) < -2f || Mathf.Abs(ball.transform.position.x - gameObject.transform.position.x) > 3f || @@ -68,6 +65,15 @@ public override void AgentReset() SetResetParameters(); } + public override float[] Heuristic() + { + var action = new float[2]; + + action[0] = -Input.GetAxis("Horizontal"); + action[1] = Input.GetAxis("Vertical"); + return action; + } + public void SetBall() { //Set the attributes of the ball by fetching the information from the academy diff --git a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Scripts/Ball3DDecision.cs b/UnitySDK/Assets/ML-Agents/Examples/3DBall/Scripts/Ball3DDecision.cs deleted file mode 100644 index 108d46a934..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Scripts/Ball3DDecision.cs +++ /dev/null @@ -1,46 +0,0 @@ -using System.Collections.Generic; -using UnityEngine; -using MLAgents; - -public class Ball3DDecision : Decision -{ - public float rotationSpeed = 2f; - - public override float[] Decide( - List vectorObs, - List visualObs, - float reward, - bool done, - List memory) - { - if (brainParameters.vectorActionSpaceType == SpaceType.Continuous) - { - var act = new List(); - - // state[5] is the velocity of the ball in the x 
orientation. - // We use this number to control the Platform's z axis rotation speed, - // so that the Platform is tilted in the x orientation correspondingly. - act.Add(vectorObs[5] * rotationSpeed); - - // state[7] is the velocity of the ball in the z orientation. - // We use this number to control the Platform's x axis rotation speed, - // so that the Platform is tilted in the z orientation correspondingly. - act.Add(-vectorObs[7] * rotationSpeed); - - return act.ToArray(); - } - - // If the vector action space type is discrete, then we don't do anything. - return new[] { 1f }; - } - - public override List MakeMemory( - List vectorObs, - List visualObs, - float reward, - bool done, - List memory) - { - return new List(); - } -} diff --git a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Scripts/Ball3DDecision.cs.meta b/UnitySDK/Assets/ML-Agents/Examples/3DBall/Scripts/Ball3DDecision.cs.meta deleted file mode 100755 index 3131680123..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Scripts/Ball3DDecision.cs.meta +++ /dev/null @@ -1,12 +0,0 @@ -fileFormatVersion: 2 -guid: ccaa8f43c15264209b137d8dc26a8d63 -timeCreated: 1503595709 -licenseType: Free -MonoImporter: - serializedVersion: 2 - defaultReferences: [] - executionOrder: 0 - icon: {instanceID: 0} - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Scripts/Ball3DHardAgent.cs b/UnitySDK/Assets/ML-Agents/Examples/3DBall/Scripts/Ball3DHardAgent.cs index a77c9c9c41..97be575f78 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Scripts/Ball3DHardAgent.cs +++ b/UnitySDK/Assets/ML-Agents/Examples/3DBall/Scripts/Ball3DHardAgent.cs @@ -25,22 +25,19 @@ public override void CollectObservations() public override void AgentAction(float[] vectorAction, string textAction) { - if (brain.brainParameters.vectorActionSpaceType == SpaceType.Continuous) - { - var actionZ = 2f * Mathf.Clamp(vectorAction[0], -1f, 1f); - var actionX = 2f * Mathf.Clamp(vectorAction[1], 
-1f, 1f); + var actionZ = 2f * Mathf.Clamp(vectorAction[0], -1f, 1f); + var actionX = 2f * Mathf.Clamp(vectorAction[1], -1f, 1f); - if ((gameObject.transform.rotation.z < 0.25f && actionZ > 0f) || - (gameObject.transform.rotation.z > -0.25f && actionZ < 0f)) - { - gameObject.transform.Rotate(new Vector3(0, 0, 1), actionZ); - } + if ((gameObject.transform.rotation.z < 0.25f && actionZ > 0f) || + (gameObject.transform.rotation.z > -0.25f && actionZ < 0f)) + { + gameObject.transform.Rotate(new Vector3(0, 0, 1), actionZ); + } - if ((gameObject.transform.rotation.x < 0.25f && actionX > 0f) || - (gameObject.transform.rotation.x > -0.25f && actionX < 0f)) - { - gameObject.transform.Rotate(new Vector3(1, 0, 0), actionX); - } + if ((gameObject.transform.rotation.x < 0.25f && actionX > 0f) || + (gameObject.transform.rotation.x > -0.25f && actionX < 0f)) + { + gameObject.transform.Rotate(new Vector3(1, 0, 0), actionX); } if ((ball.transform.position.y - gameObject.transform.position.y) < -2f || Mathf.Abs(ball.transform.position.x - gameObject.transform.position.x) > 3f || diff --git a/UnitySDK/Assets/ML-Agents/Examples/3DBall/TFModels/3DBall.nn b/UnitySDK/Assets/ML-Agents/Examples/3DBall/TFModels/3DBall.nn new file mode 100644 index 0000000000..22af655d84 Binary files /dev/null and b/UnitySDK/Assets/ML-Agents/Examples/3DBall/TFModels/3DBall.nn differ diff --git a/UnitySDK/Assets/ML-Agents/Examples/3DBall/TFModels/3DBallHardLearning.nn.meta b/UnitySDK/Assets/ML-Agents/Examples/3DBall/TFModels/3DBall.nn.meta similarity index 81% rename from UnitySDK/Assets/ML-Agents/Examples/3DBall/TFModels/3DBallHardLearning.nn.meta rename to UnitySDK/Assets/ML-Agents/Examples/3DBall/TFModels/3DBall.nn.meta index 46bf49d31d..e41069f62e 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/3DBall/TFModels/3DBallHardLearning.nn.meta +++ b/UnitySDK/Assets/ML-Agents/Examples/3DBall/TFModels/3DBall.nn.meta @@ -1,5 +1,5 @@ fileFormatVersion: 2 -guid: cee7d20369b814d549573de7e76c4a81 +guid: 
20a7b83be6b0c493d9271c65c897eb9b ScriptedImporter: userData: assetBundleName: diff --git a/UnitySDK/Assets/ML-Agents/Examples/3DBall/TFModels/3DBallHard.nn b/UnitySDK/Assets/ML-Agents/Examples/3DBall/TFModels/3DBallHard.nn new file mode 100644 index 0000000000..66cfc5120b Binary files /dev/null and b/UnitySDK/Assets/ML-Agents/Examples/3DBall/TFModels/3DBallHard.nn differ diff --git a/UnitySDK/Assets/ML-Agents/Examples/3DBall/TFModels/3DBallLearning.nn.meta b/UnitySDK/Assets/ML-Agents/Examples/3DBall/TFModels/3DBallHard.nn.meta similarity index 81% rename from UnitySDK/Assets/ML-Agents/Examples/3DBall/TFModels/3DBallLearning.nn.meta rename to UnitySDK/Assets/ML-Agents/Examples/3DBall/TFModels/3DBallHard.nn.meta index 7dfbeaac4d..c44dcc43e6 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/3DBall/TFModels/3DBallLearning.nn.meta +++ b/UnitySDK/Assets/ML-Agents/Examples/3DBall/TFModels/3DBallHard.nn.meta @@ -1,5 +1,5 @@ fileFormatVersion: 2 -guid: a0e8d1fda5a6f41be955d2b30479c2a1 +guid: 27d49984757ed46b181090a532ef48e5 ScriptedImporter: userData: assetBundleName: diff --git a/UnitySDK/Assets/ML-Agents/Examples/3DBall/TFModels/3DBallHardLearning.nn b/UnitySDK/Assets/ML-Agents/Examples/3DBall/TFModels/3DBallHardLearning.nn deleted file mode 100644 index d3d28e9c87..0000000000 Binary files a/UnitySDK/Assets/ML-Agents/Examples/3DBall/TFModels/3DBallHardLearning.nn and /dev/null differ diff --git a/UnitySDK/Assets/ML-Agents/Examples/3DBall/TFModels/3DBallLearning.nn b/UnitySDK/Assets/ML-Agents/Examples/3DBall/TFModels/3DBallLearning.nn deleted file mode 100644 index 6b76be5c1c..0000000000 Binary files a/UnitySDK/Assets/ML-Agents/Examples/3DBall/TFModels/3DBallLearning.nn and /dev/null differ diff --git a/UnitySDK/Assets/ML-Agents/Examples/Basic/Brains.meta b/UnitySDK/Assets/ML-Agents/Examples/Basic/Brains.meta deleted file mode 100644 index 18708573e0..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Basic/Brains.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 
-guid: 2f1b2f6cfea324c8990b57e271fc0d40 -folderAsset: yes -DefaultImporter: - externalObjects: {} - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Basic/Brains/BasicLearning.asset b/UnitySDK/Assets/ML-Agents/Examples/Basic/Brains/BasicLearning.asset deleted file mode 100644 index eff473d11d..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Basic/Brains/BasicLearning.asset +++ /dev/null @@ -1,23 +0,0 @@ -%YAML 1.1 -%TAG !u! tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 8b23992c8eb17439887f5e944bf04a40, type: 3} - m_Name: BasicLearning - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 20 - numStackedVectorObservations: 1 - vectorActionSize: 03000000 - cameraResolutions: [] - vectorActionDescriptions: - - - vectorActionSpaceType: 0 - model: {fileID: 11400000, guid: 53fa7c392ce3c492281be273668f6aaf, type: 3} - inferenceDevice: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Basic/Brains/BasicLearning.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/Basic/Brains/BasicLearning.asset.meta deleted file mode 100644 index 08d0b840fa..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Basic/Brains/BasicLearning.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: e5cf0e35e16264ea483f8863e5115c3c -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Basic/Brains/BasicPlayer.asset b/UnitySDK/Assets/ML-Agents/Examples/Basic/Brains/BasicPlayer.asset deleted file mode 100644 index 056aa3ce6b..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Basic/Brains/BasicPlayer.asset +++ /dev/null @@ -1,30 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 41e9bda8f3cf1492fa74926a530f6f70, type: 3} - m_Name: BasicPlayer - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 20 - numStackedVectorObservations: 1 - vectorActionSize: 03000000 - cameraResolutions: [] - vectorActionDescriptions: - - - vectorActionSpaceType: 0 - keyContinuousPlayerActions: [] - axisContinuousPlayerActions: [] - discretePlayerActions: - - key: 97 - branchIndex: 0 - value: 1 - - key: 100 - branchIndex: 0 - value: 2 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Basic/Brains/BasicPlayer.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/Basic/Brains/BasicPlayer.asset.meta deleted file mode 100644 index 38bbe8567b..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Basic/Brains/BasicPlayer.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 1adbe3db6a2f94bf2b1e22a29b955387 -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Basic/Prefabs/Basic.prefab b/UnitySDK/Assets/ML-Agents/Examples/Basic/Prefabs/Basic.prefab index 221f4aee11..f9d4ab6e74 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/Basic/Prefabs/Basic.prefab +++ b/UnitySDK/Assets/ML-Agents/Examples/Basic/Prefabs/Basic.prefab @@ -105,6 +105,7 @@ GameObject: m_Component: - component: {fileID: 4170723581433160} - component: {fileID: 65968285873374238} + - component: {fileID: 114502619508238574} - component: {fileID: 114827551040495112} m_Layer: 0 m_Name: BasicAgent @@ -896,6 +897,28 @@ BoxCollider: serializedVersion: 2 m_Size: {x: 1, y: 1, z: 1} m_Center: {x: 0, y: 0, z: 0} +--- !u!114 &114502619508238574 +MonoBehaviour: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + 
m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1263463520136984} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} + m_Name: + m_EditorClassIdentifier: + m_BrainParameters: + vectorObservationSize: 20 + numStackedVectorObservations: 1 + vectorActionSize: 03000000 + cameraResolutions: [] + vectorActionDescriptions: [] + vectorActionSpaceType: 0 + m_Model: {fileID: 11400000, guid: 53fa7c392ce3c492281be273668f6aaf, type: 3} + m_InferenceDevice: 0 + m_UseHeuristic: 0 + m_BehaviorName: Basic --- !u!114 &114827551040495112 MonoBehaviour: m_ObjectHideFlags: 1 @@ -907,7 +930,6 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 624480a72e46148118ab2e2d89b537de, type: 3} m_Name: m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: e5cf0e35e16264ea483f8863e5115c3c, type: 2} agentParameters: agentCameras: [] agentRenderTextures: [] diff --git a/UnitySDK/Assets/ML-Agents/Examples/Basic/Scenes/Basic.unity b/UnitySDK/Assets/ML-Agents/Examples/Basic/Scenes/Basic.unity index 352117abb6..930ff1d095 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/Basic/Scenes/Basic.unity +++ b/UnitySDK/Assets/ML-Agents/Examples/Basic/Scenes/Basic.unity @@ -283,10 +283,8 @@ MonoBehaviour: m_Name: m_EditorClassIdentifier: broadcastHub: - broadcastingBrains: + brainsToControl: - {fileID: 11400000, guid: e5cf0e35e16264ea483f8863e5115c3c, type: 2} - m_BrainsToControl: [] - m_MaxSteps: 0 m_TrainingConfiguration: width: 80 height: 80 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Basic/Scripts/BasicAgent.cs b/UnitySDK/Assets/ML-Agents/Examples/Basic/Scripts/BasicAgent.cs index 7903eb0d9a..bc7ed3f1d0 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/Basic/Scripts/BasicAgent.cs +++ b/UnitySDK/Assets/ML-Agents/Examples/Basic/Scripts/BasicAgent.cs @@ -73,6 +73,19 @@ public override void AgentReset() largeGoal.transform.position = new Vector3(m_LargeGoalPosition - 10f, 0f, 0f); } + public override float[] Heuristic() + { + 
if (Input.GetKey(KeyCode.D)) + { + return new float[] { 2 }; + } + if (Input.GetKey(KeyCode.A)) + { + return new float[] { 1 }; + } + return new float[] { 0 }; + } + public override void AgentOnDone() { } diff --git a/UnitySDK/Assets/ML-Agents/Examples/Basic/Scripts/BasicDecision.cs b/UnitySDK/Assets/ML-Agents/Examples/Basic/Scripts/BasicDecision.cs deleted file mode 100644 index 5710545f53..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Basic/Scripts/BasicDecision.cs +++ /dev/null @@ -1,26 +0,0 @@ -using System.Collections.Generic; -using UnityEngine; -using MLAgents; - -public class BasicDecision : Decision -{ - public override float[] Decide( - List vectorObs, - List visualObs, - float reward, - bool done, - List memory) - { - return new[] { 1f }; - } - - public override List MakeMemory( - List vectorObs, - List visualObs, - float reward, - bool done, - List memory) - { - return new List(); - } -} diff --git a/UnitySDK/Assets/ML-Agents/Examples/Basic/Scripts/BasicDecision.cs.meta b/UnitySDK/Assets/ML-Agents/Examples/Basic/Scripts/BasicDecision.cs.meta deleted file mode 100644 index 7f5d5b1bbc..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Basic/Scripts/BasicDecision.cs.meta +++ /dev/null @@ -1,12 +0,0 @@ -fileFormatVersion: 2 -guid: 99399d2439f894b149d8e67b85b6e07a -timeCreated: 1503355437 -licenseType: Free -MonoImporter: - serializedVersion: 2 - defaultReferences: [] - executionOrder: 0 - icon: {instanceID: 0} - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Basic/TFModels/Basic.nn b/UnitySDK/Assets/ML-Agents/Examples/Basic/TFModels/Basic.nn new file mode 100644 index 0000000000..d30b05bfa0 Binary files /dev/null and b/UnitySDK/Assets/ML-Agents/Examples/Basic/TFModels/Basic.nn differ diff --git a/UnitySDK/Assets/ML-Agents/Examples/Basic/TFModels/BasicLearning.nn.meta b/UnitySDK/Assets/ML-Agents/Examples/Basic/TFModels/Basic.nn.meta similarity index 81% rename from 
UnitySDK/Assets/ML-Agents/Examples/Basic/TFModels/BasicLearning.nn.meta rename to UnitySDK/Assets/ML-Agents/Examples/Basic/TFModels/Basic.nn.meta index 75621c252a..5fc29a0f4f 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/Basic/TFModels/BasicLearning.nn.meta +++ b/UnitySDK/Assets/ML-Agents/Examples/Basic/TFModels/Basic.nn.meta @@ -1,5 +1,5 @@ fileFormatVersion: 2 -guid: 53fa7c392ce3c492281be273668f6aaf +guid: 468c183196f1844f69e125c99dd135a1 ScriptedImporter: userData: assetBundleName: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Basic/TFModels/BasicLearning.nn b/UnitySDK/Assets/ML-Agents/Examples/Basic/TFModels/BasicLearning.nn deleted file mode 100644 index ba9513c5f1..0000000000 Binary files a/UnitySDK/Assets/ML-Agents/Examples/Basic/TFModels/BasicLearning.nn and /dev/null differ diff --git a/UnitySDK/Assets/ML-Agents/Examples/Bouncer/Brains.meta b/UnitySDK/Assets/ML-Agents/Examples/Bouncer/Brains.meta deleted file mode 100644 index 0ca1343ad4..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Bouncer/Brains.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: db94c867000904ac38f30df6a537e76f -folderAsset: yes -DefaultImporter: - externalObjects: {} - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Bouncer/Brains/BouncerLearning.asset b/UnitySDK/Assets/ML-Agents/Examples/Bouncer/Brains/BouncerLearning.asset deleted file mode 100644 index c0aff57a96..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Bouncer/Brains/BouncerLearning.asset +++ /dev/null @@ -1,25 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 8b23992c8eb17439887f5e944bf04a40, type: 3} - m_Name: BouncerLearning - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 6 - numStackedVectorObservations: 3 - vectorActionSize: 03000000 - cameraResolutions: [] - vectorActionDescriptions: - - - - - - - vectorActionSpaceType: 1 - model: {fileID: 11400000, guid: f5250a39cb2134db49b833e3c92527a1, type: 3} - inferenceDevice: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Bouncer/Brains/BouncerLearning.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/Bouncer/Brains/BouncerLearning.asset.meta deleted file mode 100644 index 18b9628815..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Bouncer/Brains/BouncerLearning.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 573920e3a672d40038169c7ffdbdca05 -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Bouncer/Brains/BouncerPlayer.asset b/UnitySDK/Assets/ML-Agents/Examples/Bouncer/Brains/BouncerPlayer.asset deleted file mode 100644 index 553f950cf0..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Bouncer/Brains/BouncerPlayer.asset +++ /dev/null @@ -1,41 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 41e9bda8f3cf1492fa74926a530f6f70, type: 3} - m_Name: BouncerPlayer - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 6 - numStackedVectorObservations: 3 - vectorActionSize: 03000000 - cameraResolutions: [] - vectorActionDescriptions: - - - - - - - vectorActionSpaceType: 1 - keyContinuousPlayerActions: - - key: 97 - index: 0 - value: -1 - - key: 100 - index: 0 - value: 1 - - key: 115 - index: 2 - value: -1 - - key: 119 - index: 2 - value: 1 - - key: 32 - index: 1 - value: 1 - axisContinuousPlayerActions: [] - discretePlayerActions: [] diff --git a/UnitySDK/Assets/ML-Agents/Examples/Bouncer/Brains/BouncerPlayer.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/Bouncer/Brains/BouncerPlayer.asset.meta deleted file mode 100644 index 69da83a91d..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Bouncer/Brains/BouncerPlayer.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 5527511df7b944e8e9177dd69db5a9c1 -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Bouncer/Prefabs/Environment.prefab b/UnitySDK/Assets/ML-Agents/Examples/Bouncer/Prefabs/Environment.prefab index e7d71645e4..dc7003a97a 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/Bouncer/Prefabs/Environment.prefab +++ b/UnitySDK/Assets/ML-Agents/Examples/Bouncer/Prefabs/Environment.prefab @@ -144,6 +144,7 @@ GameObject: - component: {fileID: 33085749764809866} - component: {fileID: 65800894914404220} - component: {fileID: 54030303118153432} + - component: {fileID: 114938751572484598} - component: {fileID: 114878620968301562} m_Layer: 0 m_Name: Agent @@ -826,7 +827,6 @@ MonoBehaviour: m_Script: {fileID: 
11500000, guid: 0f09741cbce2e44bc88d3e92917eea0e, type: 3} m_Name: m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: 573920e3a672d40038169c7ffdbdca05, type: 2} agentParameters: agentCameras: [] agentRenderTextures: [] @@ -837,3 +837,25 @@ MonoBehaviour: target: {fileID: 1160631129428284} bodyObject: {fileID: 1680588139522898} strength: 500 +--- !u!114 &114938751572484598 +MonoBehaviour: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1604827395706042} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} + m_Name: + m_EditorClassIdentifier: + m_BrainParameters: + vectorObservationSize: 6 + numStackedVectorObservations: 3 + vectorActionSize: 03000000 + cameraResolutions: [] + vectorActionDescriptions: [] + vectorActionSpaceType: 1 + m_Model: {fileID: 11400000, guid: f5250a39cb2134db49b833e3c92527a1, type: 3} + m_InferenceDevice: 0 + m_UseHeuristic: 0 + m_BehaviorName: Bouncer diff --git a/UnitySDK/Assets/ML-Agents/Examples/Bouncer/Scenes/Bouncer.unity b/UnitySDK/Assets/ML-Agents/Examples/Bouncer/Scenes/Bouncer.unity index 7471c656fd..7161527a53 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/Bouncer/Scenes/Bouncer.unity +++ b/UnitySDK/Assets/ML-Agents/Examples/Bouncer/Scenes/Bouncer.unity @@ -836,10 +836,8 @@ MonoBehaviour: m_Name: m_EditorClassIdentifier: broadcastHub: - broadcastingBrains: + brainsToControl: - {fileID: 11400000, guid: 573920e3a672d40038169c7ffdbdca05, type: 2} - m_BrainsToControl: [] - m_MaxSteps: 0 m_TrainingConfiguration: width: 80 height: 80 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Bouncer/Scenes/BouncerIL.unity b/UnitySDK/Assets/ML-Agents/Examples/Bouncer/Scenes/BouncerIL.unity deleted file mode 100644 index 67e54dccd3..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Bouncer/Scenes/BouncerIL.unity +++ /dev/null @@ -1,1235 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!29 &1 -OcclusionCullingSettings: - m_ObjectHideFlags: 0 - serializedVersion: 2 - m_OcclusionBakeSettings: - smallestOccluder: 5 - smallestHole: 0.25 - backfaceThreshold: 100 - m_SceneGUID: 00000000000000000000000000000000 - m_OcclusionCullingData: {fileID: 0} ---- !u!104 &2 -RenderSettings: - m_ObjectHideFlags: 0 - serializedVersion: 8 - m_Fog: 0 - m_FogColor: {r: 0.5, g: 0.5, b: 0.5, a: 1} - m_FogMode: 3 - m_FogDensity: 0.01 - m_LinearFogStart: 0 - m_LinearFogEnd: 300 - m_AmbientSkyColor: {r: 0.8, g: 0.8, b: 0.8, a: 1} - m_AmbientEquatorColor: {r: 0.114, g: 0.125, b: 0.133, a: 1} - m_AmbientGroundColor: {r: 0.047, g: 0.043, b: 0.035, a: 1} - m_AmbientIntensity: 1 - m_AmbientMode: 3 - m_SubtractiveShadowColor: {r: 0.42, g: 0.478, b: 0.627, a: 1} - m_SkyboxMaterial: {fileID: 10304, guid: 0000000000000000f000000000000000, type: 0} - m_HaloStrength: 0.5 - m_FlareStrength: 1 - m_FlareFadeSpeed: 3 - m_HaloTexture: {fileID: 0} - m_SpotCookie: {fileID: 10001, guid: 0000000000000000e000000000000000, type: 0} - m_DefaultReflectionMode: 0 - m_DefaultReflectionResolution: 128 - m_ReflectionBounces: 1 - m_ReflectionIntensity: 1 - m_CustomReflection: {fileID: 0} - m_Sun: {fileID: 0} - m_IndirectSpecularColor: {r: 0, g: 0, b: 0, a: 1} ---- !u!157 &3 -LightmapSettings: - m_ObjectHideFlags: 0 - serializedVersion: 11 - m_GIWorkflowMode: 1 - m_GISettings: - serializedVersion: 2 - m_BounceScale: 1 - m_IndirectOutputScale: 1 - m_AlbedoBoost: 1 - m_TemporalCoherenceThreshold: 1 - m_EnvironmentLightingMode: 0 - m_EnableBakedLightmaps: 1 - m_EnableRealtimeLightmaps: 1 - m_LightmapEditorSettings: - serializedVersion: 9 - m_Resolution: 2 - m_BakeResolution: 40 - m_TextureWidth: 1024 - m_TextureHeight: 1024 - m_AO: 0 - m_AOMaxDistance: 1 - m_CompAOExponent: 1 - m_CompAOExponentDirect: 0 - m_Padding: 2 - m_LightmapParameters: {fileID: 0} - m_LightmapsBakeMode: 1 - m_TextureCompression: 1 - m_FinalGather: 0 - m_FinalGatherFiltering: 1 - m_FinalGatherRayCount: 256 
- m_ReflectionCompression: 2 - m_MixedBakeMode: 2 - m_BakeBackend: 0 - m_PVRSampling: 1 - m_PVRDirectSampleCount: 32 - m_PVRSampleCount: 500 - m_PVRBounces: 2 - m_PVRFilterTypeDirect: 0 - m_PVRFilterTypeIndirect: 0 - m_PVRFilterTypeAO: 0 - m_PVRFilteringMode: 1 - m_PVRCulling: 1 - m_PVRFilteringGaussRadiusDirect: 1 - m_PVRFilteringGaussRadiusIndirect: 5 - m_PVRFilteringGaussRadiusAO: 2 - m_PVRFilteringAtrousPositionSigmaDirect: 0.5 - m_PVRFilteringAtrousPositionSigmaIndirect: 2 - m_PVRFilteringAtrousPositionSigmaAO: 1 - m_ShowResolutionOverlay: 1 - m_LightingDataAsset: {fileID: 0} - m_UseShadowmask: 1 ---- !u!196 &4 -NavMeshSettings: - serializedVersion: 2 - m_ObjectHideFlags: 0 - m_BuildSettings: - serializedVersion: 2 - agentTypeID: 0 - agentRadius: 0.5 - agentHeight: 2 - agentSlope: 45 - agentClimb: 0.4 - ledgeDropHeight: 0 - maxJumpAcrossDistance: 0 - minRegionArea: 2 - manualCellSize: 0 - cellSize: 0.16666667 - manualTileSize: 0 - tileSize: 256 - accuratePlacement: 0 - debug: - m_Flags: 0 - m_NavMeshData: {fileID: 0} ---- !u!1001 &99663122 -Prefab: - m_ObjectHideFlags: 0 - serializedVersion: 2 - m_Modification: - m_TransformParent: {fileID: 0} - m_Modifications: - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalPosition.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalPosition.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalPosition.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalRotation.x - value: 0.31598538 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalRotation.y - value: -0.3596048 - 
objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalRotation.z - value: 0.13088542 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalRotation.w - value: 0.8681629 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_RootOrder - value: 2 - objectReference: {fileID: 0} - m_RemovedComponents: [] - m_ParentPrefab: {fileID: 100100000, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - m_IsPrefabParent: 0 ---- !u!1 &178998667 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 1323881343079198, guid: e2c4e1ad4f2224d34bb09d20f26b3207, - type: 2} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 178998668} - m_Layer: 0 - m_Name: TeacherEnvironment - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &178998668 -Transform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 4119871321101852, guid: e2c4e1ad4f2224d34bb09d20f26b3207, - type: 2} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 178998667} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: -25, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 1737130313} - - {fileID: 239995338} - - {fileID: 975123350} - m_Father: {fileID: 0} - m_RootOrder: 3 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &239995337 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 1604827395706042, guid: e2c4e1ad4f2224d34bb09d20f26b3207, - type: 2} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 239995338} - - component: {fileID: 239995342} - - component: {fileID: 239995341} - - component: {fileID: 239995340} - - component: {fileID: 
239995339} - m_Layer: 0 - m_Name: Agent - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &239995338 -Transform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 4871476620936902, guid: e2c4e1ad4f2224d34bb09d20f26b3207, - type: 2} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 239995337} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 1.03, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 1271806653} - m_Father: {fileID: 178998668} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!114 &239995339 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 114878620968301562, guid: e2c4e1ad4f2224d34bb09d20f26b3207, - type: 2} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 239995337} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 0f09741cbce2e44bc88d3e92917eea0e, type: 3} - m_Name: - m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: 5527511df7b944e8e9177dd69db5a9c1, type: 2} - agentParameters: - agentCameras: [] - agentRenderTextures: [] - maxStep: 0 - resetOnDone: 1 - onDemandDecision: 1 - numberOfActionsBetweenDecisions: 1 - target: {fileID: 1737130312} - bodyObject: {fileID: 1271806654} - strength: 500 ---- !u!54 &239995340 -Rigidbody: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 54030303118153432, guid: e2c4e1ad4f2224d34bb09d20f26b3207, - type: 2} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 239995337} - serializedVersion: 2 - m_Mass: 1 - m_Drag: 0 - m_AngularDrag: 0.05 - m_UseGravity: 1 - m_IsKinematic: 0 - m_Interpolate: 0 - m_Constraints: 112 - m_CollisionDetection: 0 ---- !u!65 &239995341 -BoxCollider: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 65800894914404220, guid: e2c4e1ad4f2224d34bb09d20f26b3207, - type: 2} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 239995337} - m_Material: {fileID: 0} - m_IsTrigger: 0 
- m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!33 &239995342 -MeshFilter: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 33085749764809866, guid: e2c4e1ad4f2224d34bb09d20f26b3207, - type: 2} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 239995337} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!1001 &499557512 -Prefab: - m_ObjectHideFlags: 0 - serializedVersion: 2 - m_Modification: - m_TransformParent: {fileID: 2116682880} - m_Modifications: - - target: {fileID: 4531230721628136, guid: c0c0b1278a6664082aaf303c5a1b995e, type: 2} - propertyPath: m_LocalPosition.x - value: 2.93 - objectReference: {fileID: 0} - - target: {fileID: 4531230721628136, guid: c0c0b1278a6664082aaf303c5a1b995e, type: 2} - propertyPath: m_LocalPosition.y - value: 1.71 - objectReference: {fileID: 0} - - target: {fileID: 4531230721628136, guid: c0c0b1278a6664082aaf303c5a1b995e, type: 2} - propertyPath: m_LocalPosition.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4531230721628136, guid: c0c0b1278a6664082aaf303c5a1b995e, type: 2} - propertyPath: m_LocalRotation.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4531230721628136, guid: c0c0b1278a6664082aaf303c5a1b995e, type: 2} - propertyPath: m_LocalRotation.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4531230721628136, guid: c0c0b1278a6664082aaf303c5a1b995e, type: 2} - propertyPath: m_LocalRotation.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4531230721628136, guid: c0c0b1278a6664082aaf303c5a1b995e, type: 2} - propertyPath: m_LocalRotation.w - value: 1 - objectReference: {fileID: 0} - - target: {fileID: 4531230721628136, guid: c0c0b1278a6664082aaf303c5a1b995e, type: 2} - propertyPath: m_RootOrder - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 1581263334907646, guid: c0c0b1278a6664082aaf303c5a1b995e, type: 2} - propertyPath: m_TagString - value: 
Untagged - objectReference: {fileID: 0} - m_RemovedComponents: [] - m_ParentPrefab: {fileID: 100100000, guid: c0c0b1278a6664082aaf303c5a1b995e, type: 2} - m_IsPrefabParent: 0 ---- !u!1 &499557513 stripped -GameObject: - m_PrefabParentObject: {fileID: 1581263334907646, guid: c0c0b1278a6664082aaf303c5a1b995e, - type: 2} - m_PrefabInternal: {fileID: 499557512} ---- !u!4 &499557514 stripped -Transform: - m_PrefabParentObject: {fileID: 4531230721628136, guid: c0c0b1278a6664082aaf303c5a1b995e, - type: 2} - m_PrefabInternal: {fileID: 499557512} ---- !u!1 &947388804 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 1102660893720968, guid: e2c4e1ad4f2224d34bb09d20f26b3207, - type: 2} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 947388805} - - component: {fileID: 947388808} - - component: {fileID: 947388807} - - component: {fileID: 947388806} - m_Layer: 0 - m_Name: Border - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &947388805 -Transform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 4882698780657638, guid: e2c4e1ad4f2224d34bb09d20f26b3207, - type: 2} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 947388804} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: -0.6, z: 0} - m_LocalScale: {x: 1.075, y: 2, z: 1.075} - m_Children: [] - m_Father: {fileID: 970250378} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!23 &947388806 -MeshRenderer: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 23983449014655026, guid: e2c4e1ad4f2224d34bb09d20f26b3207, - type: 2} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 947388804} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 69fefdd39d2b34b169e921910bed9c0d, type: 2} - 
m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!65 &947388807 -BoxCollider: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 65907152988372220, guid: e2c4e1ad4f2224d34bb09d20f26b3207, - type: 2} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 947388804} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!33 &947388808 -MeshFilter: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 33719038229885808, guid: e2c4e1ad4f2224d34bb09d20f26b3207, - type: 2} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 947388804} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!1 &970250377 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 1235871181142350, guid: e2c4e1ad4f2224d34bb09d20f26b3207, - type: 2} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 970250378} - - component: {fileID: 970250381} - - component: {fileID: 970250380} - - component: {fileID: 970250379} - m_Layer: 0 - m_Name: Ground - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &970250378 -Transform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 4106598078905870, guid: e2c4e1ad4f2224d34bb09d20f26b3207, - type: 2} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 970250377} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 
40, y: 1, z: 40} - m_Children: - - {fileID: 947388805} - m_Father: {fileID: 2116682880} - m_RootOrder: 2 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!23 &970250379 -MeshRenderer: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 23749734539917198, guid: e2c4e1ad4f2224d34bb09d20f26b3207, - type: 2} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 970250377} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: acba6bf2a290a496bb8989b42bf8698d, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!65 &970250380 -BoxCollider: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 65853139978160230, guid: e2c4e1ad4f2224d34bb09d20f26b3207, - type: 2} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 970250377} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!33 &970250381 -MeshFilter: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 33382629305220744, guid: e2c4e1ad4f2224d34bb09d20f26b3207, - type: 2} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 970250377} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!1 &975123349 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 1235871181142350, guid: e2c4e1ad4f2224d34bb09d20f26b3207, - type: 2} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 
- m_Component: - - component: {fileID: 975123350} - - component: {fileID: 975123353} - - component: {fileID: 975123352} - - component: {fileID: 975123351} - m_Layer: 0 - m_Name: Ground - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &975123350 -Transform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 4106598078905870, guid: e2c4e1ad4f2224d34bb09d20f26b3207, - type: 2} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 975123349} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 40, y: 1, z: 40} - m_Children: - - {fileID: 1419736813} - m_Father: {fileID: 178998668} - m_RootOrder: 2 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!23 &975123351 -MeshRenderer: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 23749734539917198, guid: e2c4e1ad4f2224d34bb09d20f26b3207, - type: 2} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 975123349} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: acba6bf2a290a496bb8989b42bf8698d, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!65 &975123352 -BoxCollider: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 65853139978160230, guid: e2c4e1ad4f2224d34bb09d20f26b3207, - type: 2} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 975123349} - m_Material: {fileID: 0} - 
m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!33 &975123353 -MeshFilter: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 33382629305220744, guid: e2c4e1ad4f2224d34bb09d20f26b3207, - type: 2} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 975123349} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!1001 &1271806652 -Prefab: - m_ObjectHideFlags: 0 - serializedVersion: 2 - m_Modification: - m_TransformParent: {fileID: 239995338} - m_Modifications: - - target: {fileID: 4521296116772160, guid: 70d695e1d8399400bb0f2873bdf29bb0, type: 2} - propertyPath: m_LocalPosition.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4521296116772160, guid: 70d695e1d8399400bb0f2873bdf29bb0, type: 2} - propertyPath: m_LocalPosition.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4521296116772160, guid: 70d695e1d8399400bb0f2873bdf29bb0, type: 2} - propertyPath: m_LocalPosition.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4521296116772160, guid: 70d695e1d8399400bb0f2873bdf29bb0, type: 2} - propertyPath: m_LocalRotation.x - value: -0 - objectReference: {fileID: 0} - - target: {fileID: 4521296116772160, guid: 70d695e1d8399400bb0f2873bdf29bb0, type: 2} - propertyPath: m_LocalRotation.y - value: 1 - objectReference: {fileID: 0} - - target: {fileID: 4521296116772160, guid: 70d695e1d8399400bb0f2873bdf29bb0, type: 2} - propertyPath: m_LocalRotation.z - value: -0 - objectReference: {fileID: 0} - - target: {fileID: 4521296116772160, guid: 70d695e1d8399400bb0f2873bdf29bb0, type: 2} - propertyPath: m_LocalRotation.w - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4521296116772160, guid: 70d695e1d8399400bb0f2873bdf29bb0, type: 2} - propertyPath: m_RootOrder - value: 0 - objectReference: {fileID: 0} - m_RemovedComponents: [] - m_ParentPrefab: {fileID: 100100000, guid: 70d695e1d8399400bb0f2873bdf29bb0, type: 2} - 
m_IsPrefabParent: 0 ---- !u!4 &1271806653 stripped -Transform: - m_PrefabParentObject: {fileID: 4521296116772160, guid: 70d695e1d8399400bb0f2873bdf29bb0, - type: 2} - m_PrefabInternal: {fileID: 1271806652} ---- !u!1 &1271806654 stripped -GameObject: - m_PrefabParentObject: {fileID: 1293290366791352, guid: 70d695e1d8399400bb0f2873bdf29bb0, - type: 2} - m_PrefabInternal: {fileID: 1271806652} ---- !u!1 &1419736812 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 1102660893720968, guid: e2c4e1ad4f2224d34bb09d20f26b3207, - type: 2} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 1419736813} - - component: {fileID: 1419736816} - - component: {fileID: 1419736815} - - component: {fileID: 1419736814} - m_Layer: 0 - m_Name: Border - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &1419736813 -Transform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 4882698780657638, guid: e2c4e1ad4f2224d34bb09d20f26b3207, - type: 2} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1419736812} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: -0.6, z: 0} - m_LocalScale: {x: 1.075, y: 2, z: 1.075} - m_Children: [] - m_Father: {fileID: 975123350} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!23 &1419736814 -MeshRenderer: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 23983449014655026, guid: e2c4e1ad4f2224d34bb09d20f26b3207, - type: 2} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1419736812} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 69fefdd39d2b34b169e921910bed9c0d, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: 
{fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!65 &1419736815 -BoxCollider: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 65907152988372220, guid: e2c4e1ad4f2224d34bb09d20f26b3207, - type: 2} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1419736812} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!33 &1419736816 -MeshFilter: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 33719038229885808, guid: e2c4e1ad4f2224d34bb09d20f26b3207, - type: 2} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1419736812} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!1 &1453982293 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 1453982295} - - component: {fileID: 1453982294} - m_Layer: 0 - m_Name: Academy - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!114 &1453982294 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1453982293} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 7223013998783490db672119a97e1fdb, type: 3} - m_Name: - m_EditorClassIdentifier: - broadcastHub: - broadcastingBrains: - - {fileID: 11400000, guid: 573920e3a672d40038169c7ffdbdca05, type: 2} - - {fileID: 11400000, guid: 5527511df7b944e8e9177dd69db5a9c1, type: 2} - m_BrainsToControl: [] - m_MaxSteps: 0 - m_TrainingConfiguration: - width: 80 - height: 80 - 
qualityLevel: 1 - timeScale: 100 - targetFrameRate: -1 - m_InferenceConfiguration: - width: 1280 - height: 720 - qualityLevel: 5 - timeScale: 1 - targetFrameRate: 60 - resetParameters: - m_ResetParameters: - - key: target_scale - value: 1 - gravityMultiplier: 2 ---- !u!4 &1453982295 -Transform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1453982293} - m_LocalRotation: {x: -0, y: -0, z: 0.7071068, w: 0.7071068} - m_LocalPosition: {x: 0, y: 1, z: 0} - m_LocalScale: {x: 72.52305, y: 72.39882, z: 72.52292} - m_Children: [] - m_Father: {fileID: 0} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1001 &1556090281 -Prefab: - m_ObjectHideFlags: 0 - serializedVersion: 2 - m_Modification: - m_TransformParent: {fileID: 0} - m_Modifications: - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalPosition.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalPosition.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalPosition.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalRotation.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalRotation.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalRotation.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalRotation.w - value: 1 - objectReference: 
{fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_RootOrder - value: 5 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_AnchoredPosition.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_AnchoredPosition.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_SizeDelta.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_SizeDelta.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_AnchorMin.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_AnchorMin.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_AnchorMax.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_AnchorMax.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_Pivot.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_Pivot.y - value: 0 - objectReference: {fileID: 0} - m_RemovedComponents: [] - m_ParentPrefab: {fileID: 100100000, guid: 3ce107b4a79bc4eef83afde434932a68, type: 2} - m_IsPrefabParent: 0 ---- !u!1001 &1712169739 -Prefab: - 
m_ObjectHideFlags: 0 - serializedVersion: 2 - m_Modification: - m_TransformParent: {fileID: 1763764700} - m_Modifications: - - target: {fileID: 4521296116772160, guid: 70d695e1d8399400bb0f2873bdf29bb0, type: 2} - propertyPath: m_LocalPosition.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4521296116772160, guid: 70d695e1d8399400bb0f2873bdf29bb0, type: 2} - propertyPath: m_LocalPosition.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4521296116772160, guid: 70d695e1d8399400bb0f2873bdf29bb0, type: 2} - propertyPath: m_LocalPosition.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4521296116772160, guid: 70d695e1d8399400bb0f2873bdf29bb0, type: 2} - propertyPath: m_LocalRotation.x - value: -0 - objectReference: {fileID: 0} - - target: {fileID: 4521296116772160, guid: 70d695e1d8399400bb0f2873bdf29bb0, type: 2} - propertyPath: m_LocalRotation.y - value: 1 - objectReference: {fileID: 0} - - target: {fileID: 4521296116772160, guid: 70d695e1d8399400bb0f2873bdf29bb0, type: 2} - propertyPath: m_LocalRotation.z - value: -0 - objectReference: {fileID: 0} - - target: {fileID: 4521296116772160, guid: 70d695e1d8399400bb0f2873bdf29bb0, type: 2} - propertyPath: m_LocalRotation.w - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4521296116772160, guid: 70d695e1d8399400bb0f2873bdf29bb0, type: 2} - propertyPath: m_RootOrder - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 1293290366791352, guid: 70d695e1d8399400bb0f2873bdf29bb0, type: 2} - propertyPath: m_Name - value: AgentCube_Blue - objectReference: {fileID: 0} - m_RemovedComponents: [] - m_ParentPrefab: {fileID: 100100000, guid: 70d695e1d8399400bb0f2873bdf29bb0, type: 2} - m_IsPrefabParent: 0 ---- !u!4 &1712169740 stripped -Transform: - m_PrefabParentObject: {fileID: 4521296116772160, guid: 70d695e1d8399400bb0f2873bdf29bb0, - type: 2} - m_PrefabInternal: {fileID: 1712169739} ---- !u!1 &1712169741 stripped -GameObject: - m_PrefabParentObject: {fileID: 
1293290366791352, guid: 70d695e1d8399400bb0f2873bdf29bb0, - type: 2} - m_PrefabInternal: {fileID: 1712169739} ---- !u!1001 &1737130311 -Prefab: - m_ObjectHideFlags: 0 - serializedVersion: 2 - m_Modification: - m_TransformParent: {fileID: 178998668} - m_Modifications: - - target: {fileID: 4531230721628136, guid: c0c0b1278a6664082aaf303c5a1b995e, type: 2} - propertyPath: m_LocalPosition.x - value: 2.93 - objectReference: {fileID: 0} - - target: {fileID: 4531230721628136, guid: c0c0b1278a6664082aaf303c5a1b995e, type: 2} - propertyPath: m_LocalPosition.y - value: 1.71 - objectReference: {fileID: 0} - - target: {fileID: 4531230721628136, guid: c0c0b1278a6664082aaf303c5a1b995e, type: 2} - propertyPath: m_LocalPosition.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4531230721628136, guid: c0c0b1278a6664082aaf303c5a1b995e, type: 2} - propertyPath: m_LocalRotation.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4531230721628136, guid: c0c0b1278a6664082aaf303c5a1b995e, type: 2} - propertyPath: m_LocalRotation.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4531230721628136, guid: c0c0b1278a6664082aaf303c5a1b995e, type: 2} - propertyPath: m_LocalRotation.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4531230721628136, guid: c0c0b1278a6664082aaf303c5a1b995e, type: 2} - propertyPath: m_LocalRotation.w - value: 1 - objectReference: {fileID: 0} - - target: {fileID: 4531230721628136, guid: c0c0b1278a6664082aaf303c5a1b995e, type: 2} - propertyPath: m_RootOrder - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 1581263334907646, guid: c0c0b1278a6664082aaf303c5a1b995e, type: 2} - propertyPath: m_TagString - value: Untagged - objectReference: {fileID: 0} - m_RemovedComponents: [] - m_ParentPrefab: {fileID: 100100000, guid: c0c0b1278a6664082aaf303c5a1b995e, type: 2} - m_IsPrefabParent: 0 ---- !u!1 &1737130312 stripped -GameObject: - m_PrefabParentObject: {fileID: 1581263334907646, guid: 
c0c0b1278a6664082aaf303c5a1b995e, - type: 2} - m_PrefabInternal: {fileID: 1737130311} ---- !u!4 &1737130313 stripped -Transform: - m_PrefabParentObject: {fileID: 4531230721628136, guid: c0c0b1278a6664082aaf303c5a1b995e, - type: 2} - m_PrefabInternal: {fileID: 1737130311} ---- !u!1 &1763764699 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 1604827395706042, guid: e2c4e1ad4f2224d34bb09d20f26b3207, - type: 2} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 1763764700} - - component: {fileID: 1763764704} - - component: {fileID: 1763764703} - - component: {fileID: 1763764702} - - component: {fileID: 1763764701} - m_Layer: 0 - m_Name: Agent - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &1763764700 -Transform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 4871476620936902, guid: e2c4e1ad4f2224d34bb09d20f26b3207, - type: 2} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1763764699} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 1.03, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 1712169740} - m_Father: {fileID: 2116682880} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!114 &1763764701 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 114878620968301562, guid: e2c4e1ad4f2224d34bb09d20f26b3207, - type: 2} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1763764699} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 0f09741cbce2e44bc88d3e92917eea0e, type: 3} - m_Name: - m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: 573920e3a672d40038169c7ffdbdca05, type: 2} - agentParameters: - agentCameras: [] - agentRenderTextures: [] - maxStep: 0 - resetOnDone: 1 - onDemandDecision: 1 - numberOfActionsBetweenDecisions: 1 - target: {fileID: 499557513} - bodyObject: {fileID: 1712169741} - strength: 
500 ---- !u!54 &1763764702 -Rigidbody: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 54030303118153432, guid: e2c4e1ad4f2224d34bb09d20f26b3207, - type: 2} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1763764699} - serializedVersion: 2 - m_Mass: 1 - m_Drag: 0 - m_AngularDrag: 0.05 - m_UseGravity: 1 - m_IsKinematic: 0 - m_Interpolate: 0 - m_Constraints: 112 - m_CollisionDetection: 0 ---- !u!65 &1763764703 -BoxCollider: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 65800894914404220, guid: e2c4e1ad4f2224d34bb09d20f26b3207, - type: 2} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1763764699} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!33 &1763764704 -MeshFilter: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 33085749764809866, guid: e2c4e1ad4f2224d34bb09d20f26b3207, - type: 2} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1763764699} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!1 &2080114702 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 2080114706} - - component: {fileID: 2080114705} - - component: {fileID: 2080114704} - - component: {fileID: 2080114703} - m_Layer: 0 - m_Name: Main Camera - m_TagString: MainCamera - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!81 &2080114703 -AudioListener: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 2080114702} - m_Enabled: 1 ---- !u!124 &2080114704 -Behaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 2080114702} - m_Enabled: 1 ---- !u!20 &2080114705 -Camera: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - 
m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 2080114702} - m_Enabled: 1 - serializedVersion: 2 - m_ClearFlags: 2 - m_BackGroundColor: {r: 0.46666667, g: 0.5647059, b: 0.60784316, a: 1} - m_NormalizedViewPortRect: - serializedVersion: 2 - x: 0 - y: 0 - width: 1 - height: 1 - near clip plane: 0.3 - far clip plane: 1000 - field of view: 60 - orthographic: 0 - orthographic size: 5 - m_Depth: -1 - m_CullingMask: - serializedVersion: 2 - m_Bits: 4294967295 - m_RenderingPath: -1 - m_TargetTexture: {fileID: 0} - m_TargetDisplay: 0 - m_TargetEye: 3 - m_HDR: 1 - m_AllowMSAA: 1 - m_AllowDynamicResolution: 0 - m_ForceIntoRT: 0 - m_OcclusionCulling: 1 - m_StereoConvergence: 10 - m_StereoSeparation: 0.022 ---- !u!4 &2080114706 -Transform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 2080114702} - m_LocalRotation: {x: 0.42261827, y: 0, z: 0, w: 0.9063079} - m_LocalPosition: {x: 0, y: 50, z: -42.63} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 0} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 50, y: 0, z: 0} ---- !u!1 &2116682879 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 1323881343079198, guid: e2c4e1ad4f2224d34bb09d20f26b3207, - type: 2} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 2116682880} - m_Layer: 0 - m_Name: StudentEnvironment - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &2116682880 -Transform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 4119871321101852, guid: e2c4e1ad4f2224d34bb09d20f26b3207, - type: 2} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 2116682879} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 25, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 499557514} - - {fileID: 1763764700} - - {fileID: 970250378} - m_Father: {fileID: 0} - 
m_RootOrder: 4 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} diff --git a/UnitySDK/Assets/ML-Agents/Examples/Bouncer/Scripts/BouncerAgent.cs b/UnitySDK/Assets/ML-Agents/Examples/Bouncer/Scripts/BouncerAgent.cs index 3626515fca..e72bca3621 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/Bouncer/Scripts/BouncerAgent.cs +++ b/UnitySDK/Assets/ML-Agents/Examples/Bouncer/Scripts/BouncerAgent.cs @@ -104,6 +104,16 @@ private void FixedUpdate() } } + public override float[] Heuristic() + { + var action = new float[3]; + + action[0] = Input.GetAxis("Horizontal"); + action[1] = Input.GetKey(KeyCode.Space) ? 1.0f : 0.0f; + action[2] = Input.GetAxis("Vertical"); + return action; + } + private void Update() { if (m_LookDir.magnitude > float.Epsilon) diff --git a/UnitySDK/Assets/ML-Agents/Examples/Bouncer/TFModels/Bouncer.nn b/UnitySDK/Assets/ML-Agents/Examples/Bouncer/TFModels/Bouncer.nn new file mode 100644 index 0000000000..4d65955bb1 Binary files /dev/null and b/UnitySDK/Assets/ML-Agents/Examples/Bouncer/TFModels/Bouncer.nn differ diff --git a/UnitySDK/Assets/ML-Agents/Examples/Bouncer/TFModels/BouncerLearning.nn.meta b/UnitySDK/Assets/ML-Agents/Examples/Bouncer/TFModels/Bouncer.nn.meta similarity index 81% rename from UnitySDK/Assets/ML-Agents/Examples/Bouncer/TFModels/BouncerLearning.nn.meta rename to UnitySDK/Assets/ML-Agents/Examples/Bouncer/TFModels/Bouncer.nn.meta index 49eb6cd44c..7bb7bc530f 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/Bouncer/TFModels/BouncerLearning.nn.meta +++ b/UnitySDK/Assets/ML-Agents/Examples/Bouncer/TFModels/Bouncer.nn.meta @@ -1,5 +1,5 @@ fileFormatVersion: 2 -guid: f5250a39cb2134db49b833e3c92527a1 +guid: 6c4ee6ab37d9b49b492a5cc49ed47ca0 ScriptedImporter: userData: assetBundleName: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Bouncer/TFModels/BouncerLearning.nn b/UnitySDK/Assets/ML-Agents/Examples/Bouncer/TFModels/BouncerLearning.nn deleted file mode 100644 index 09a4681803..0000000000 Binary files 
a/UnitySDK/Assets/ML-Agents/Examples/Bouncer/TFModels/BouncerLearning.nn and /dev/null differ diff --git a/UnitySDK/Assets/ML-Agents/Examples/Crawler/Brains.meta b/UnitySDK/Assets/ML-Agents/Examples/Crawler/Brains.meta deleted file mode 100644 index 4cb65ecd9b..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Crawler/Brains.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 3c8c4bbf596d74ecfa02cfdd8a9a0b2f -folderAsset: yes -DefaultImporter: - externalObjects: {} - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Crawler/Brains/CrawlerDynamicLearning.asset b/UnitySDK/Assets/ML-Agents/Examples/Crawler/Brains/CrawlerDynamicLearning.asset deleted file mode 100644 index 5413cffc8b..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Crawler/Brains/CrawlerDynamicLearning.asset +++ /dev/null @@ -1,42 +0,0 @@ -%YAML 1.1 -%TAG !u! tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 8b23992c8eb17439887f5e944bf04a40, type: 3} - m_Name: CrawlerDynamicLearning - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 126 - numStackedVectorObservations: 1 - vectorActionSize: 14000000 - cameraResolutions: [] - vectorActionDescriptions: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - vectorActionSpaceType: 1 - model: {fileID: 11400000, guid: abc9c8f2180154ed7ba3f116ab0beb90, type: 3} - inferenceDevice: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Crawler/Brains/CrawlerDynamicLearning.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/Crawler/Brains/CrawlerDynamicLearning.asset.meta deleted file mode 100644 index e259c5ffb5..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Crawler/Brains/CrawlerDynamicLearning.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ 
-fileFormatVersion: 2 -guid: 0e3b44d36c7a047c4addb92457b12be5 -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Crawler/Brains/CrawlerStaticLearning.asset b/UnitySDK/Assets/ML-Agents/Examples/Crawler/Brains/CrawlerStaticLearning.asset deleted file mode 100644 index b2882d5a84..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Crawler/Brains/CrawlerStaticLearning.asset +++ /dev/null @@ -1,42 +0,0 @@ -%YAML 1.1 -%TAG !u! tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 8b23992c8eb17439887f5e944bf04a40, type: 3} - m_Name: CrawlerStaticLearning - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 126 - numStackedVectorObservations: 1 - vectorActionSize: 14000000 - cameraResolutions: [] - vectorActionDescriptions: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - vectorActionSpaceType: 1 - model: {fileID: 11400000, guid: 48982d8fa360a4ed0bb265495e4f378b, type: 3} - inferenceDevice: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Crawler/Brains/CrawlerStaticLearning.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/Crawler/Brains/CrawlerStaticLearning.asset.meta deleted file mode 100644 index f197181a7d..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Crawler/Brains/CrawlerStaticLearning.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 0505e961608004377974940ed17e03d5 -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 0 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Crawler/Prefabs/Crawler.prefab b/UnitySDK/Assets/ML-Agents/Examples/Crawler/Prefabs/Crawler.prefab deleted file mode 100644 index 
79df682b2f..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Crawler/Prefabs/Crawler.prefab +++ /dev/null @@ -1,2521 +0,0 @@ -%YAML 1.1 -%TAG !u! tag:unity3d.com,2011: ---- !u!1001 &100100000 -Prefab: - m_ObjectHideFlags: 1 - serializedVersion: 2 - m_Modification: - m_TransformParent: {fileID: 0} - m_Modifications: [] - m_RemovedComponents: [] - m_ParentPrefab: {fileID: 0} - m_RootGameObject: {fileID: 1129387478859796} - m_IsPrefabParent: 1 ---- !u!1 &1114398910245350 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4315486672294942} - - component: {fileID: 33942247511407576} - - component: {fileID: 136766532690346814} - - component: {fileID: 23057739860601414} - - component: {fileID: 54016577210578658} - - component: {fileID: 153407740652563728} - - component: {fileID: 114805933462601644} - m_Layer: 0 - m_Name: leg0 - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1129387478859796 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4283901937423508} - - component: {fileID: 114824265617332224} - - component: {fileID: 114366290264684242} - m_Layer: 0 - m_Name: Crawler - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1187688929114980 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4895871576690222} - - component: {fileID: 33554492852954920} - - component: {fileID: 136460960731582558} - - component: {fileID: 23573846879420774} - - component: {fileID: 54496921079023298} - - component: {fileID: 153208165644155028} - - component: {fileID: 114089784393623120} - 
m_Layer: 0 - m_Name: foreleg3 - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1272060045703202 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4320635105751664} - - component: {fileID: 33648152897957930} - - component: {fileID: 136131153676953572} - - component: {fileID: 23077321719373320} - - component: {fileID: 54132359757210108} - - component: {fileID: 153987082673184770} - - component: {fileID: 114201511158296780} - m_Layer: 0 - m_Name: leg1 - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1298268328268866 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4042226991863502} - - component: {fileID: 33247528189479878} - - component: {fileID: 136674299004804612} - - component: {fileID: 23407712350003930} - - component: {fileID: 54279887023973758} - - component: {fileID: 153842898101386942} - - component: {fileID: 114744867262310804} - m_Layer: 0 - m_Name: leg3 - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1374302196839222 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4345259575467704} - - component: {fileID: 33600319474102400} - - component: {fileID: 136690235415233670} - - component: {fileID: 23151895309585212} - - component: {fileID: 54218381078267390} - - component: {fileID: 153765018551042116} - - component: {fileID: 114245671031386664} - m_Layer: 0 - m_Name: foreleg0 - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 
&1396782062923916 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4259242127147388} - - component: {fileID: 33136431790211548} - - component: {fileID: 23868441579563002} - m_Layer: 0 - m_Name: grounded viz - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1405459036737686 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4011799289864642} - - component: {fileID: 33421992822520258} - - component: {fileID: 23237912265456852} - m_Layer: 0 - m_Name: grounded viz - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1441235561015404 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4554064857129596} - - component: {fileID: 33046987671380990} - - component: {fileID: 136669811194203340} - - component: {fileID: 23032058032275540} - - component: {fileID: 54016370531151242} - - component: {fileID: 153558694387803708} - - component: {fileID: 114701352747735826} - m_Layer: 0 - m_Name: foreleg1 - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1449857773419816 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4340731500216736} - - component: {fileID: 33947674706028178} - - component: {fileID: 23458909467204720} - m_Layer: 0 - m_Name: grounded viz - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1515584191522134 -GameObject: - 
m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4690632006859362} - - component: {fileID: 33920999609521594} - - component: {fileID: 23271479376984230} - m_Layer: 0 - m_Name: eye - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1528865102058454 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4057600146278330} - - component: {fileID: 33332933015356954} - - component: {fileID: 23350468638451092} - m_Layer: 0 - m_Name: eye - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1572738585910590 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4588771957217034} - - component: {fileID: 33237114310346146} - - component: {fileID: 136143404163221586} - - component: {fileID: 23682678482966492} - - component: {fileID: 54341884066505438} - - component: {fileID: 153475598826299386} - - component: {fileID: 114118084730610894} - m_Layer: 0 - m_Name: foreleg2 - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1726653501711894 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4785798153510720} - - component: {fileID: 33763557645324940} - - component: {fileID: 136693614685681770} - - component: {fileID: 23857179722177926} - m_Layer: 0 - m_Name: Sweatband - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1828705975967124 -GameObject: - 
m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4415045083540138} - - component: {fileID: 33505539604743060} - - component: {fileID: 135940545379391998} - - component: {fileID: 23408614112088478} - - component: {fileID: 54419147063662066} - - component: {fileID: 114418822382166060} - m_Layer: 0 - m_Name: Body - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1919512060766040 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4829345825963888} - - component: {fileID: 33580637687345318} - - component: {fileID: 23682645520376206} - m_Layer: 0 - m_Name: mouth - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1932904558829096 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4900039245054002} - - component: {fileID: 33811741900901180} - - component: {fileID: 23636497540812122} - m_Layer: 0 - m_Name: grounded viz - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1942253916102162 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4586389951765900} - - component: {fileID: 33421385354278630} - - component: {fileID: 136060786617938986} - - component: {fileID: 23214116999615168} - - component: {fileID: 54350686216978830} - - component: {fileID: 153258788415880078} - - component: {fileID: 114754976708400114} - m_Layer: 0 - m_Name: leg2 - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - 
m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &4011799289864642 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1405459036737686} - m_LocalRotation: {x: 0.000000044703484, y: -0.7071068, z: -0.7071068, w: 0.000000044703484} - m_LocalPosition: {x: 0, y: 0.85, z: 0} - m_LocalScale: {x: 1, y: 1, z: 0.3846154} - m_Children: [] - m_Father: {fileID: 4588771957217034} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4042226991863502 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1298268328268866} - m_LocalRotation: {x: -0.5, y: -0.5, z: -0.5, w: 0.5} - m_LocalPosition: {x: 0.90000004, y: 0, z: 0} - m_LocalScale: {x: 0.3, y: 0.48000023, z: 0.30000028} - m_Children: [] - m_Father: {fileID: 4283901937423508} - m_RootOrder: 3 - m_LocalEulerAnglesHint: {x: -90, y: -90, z: 0} ---- !u!4 &4057600146278330 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1528865102058454} - m_LocalRotation: {x: -0.22353469, y: 0.0000001490116, z: 0.00000004470348, w: 0.97469604} - m_LocalPosition: {x: -0.198, y: 0.12499996, z: 0.437} - m_LocalScale: {x: 0.12858818, y: 0.12858823, z: 0.073603} - m_Children: [] - m_Father: {fileID: 4415045083540138} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: -25.833002, y: 0, z: 0} ---- !u!4 &4259242127147388 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1396782062923916} - m_LocalRotation: {x: 0.5, y: -0.5, z: -0.5, w: 0.5} - m_LocalPosition: {x: 0, y: 0.85, z: 0} - m_LocalScale: {x: 0.38461584, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 4554064857129596} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4283901937423508 -Transform: - m_ObjectHideFlags: 1 - 
m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1129387478859796} - m_LocalRotation: {x: -0, y: 0.38268343, z: -0, w: 0.92387956} - m_LocalPosition: {x: 0, y: 2.1, z: 6.3} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 4315486672294942} - - {fileID: 4320635105751664} - - {fileID: 4586389951765900} - - {fileID: 4042226991863502} - - {fileID: 4345259575467704} - - {fileID: 4554064857129596} - - {fileID: 4588771957217034} - - {fileID: 4895871576690222} - - {fileID: 4415045083540138} - m_Father: {fileID: 0} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 45, z: 0} ---- !u!4 &4315486672294942 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1114398910245350} - m_LocalRotation: {x: -0.7071068, y: -0.000000044703484, z: -0.000000044703484, w: 0.7071068} - m_LocalPosition: {x: 0, y: 0, z: -0.9000001} - m_LocalScale: {x: 0.30000007, y: 0.48000073, z: 0.3000007} - m_Children: [] - m_Father: {fileID: 4283901937423508} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: -90, y: 0, z: 0} ---- !u!4 &4320635105751664 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1272060045703202} - m_LocalRotation: {x: -0.5, y: 0.5, z: 0.5, w: 0.5} - m_LocalPosition: {x: -0.90000004, y: 0, z: 0} - m_LocalScale: {x: 0.30000025, y: 0.4800002, z: 0.30000028} - m_Children: [] - m_Father: {fileID: 4283901937423508} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: -90, y: 90, z: 0} ---- !u!4 &4340731500216736 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1449857773419816} - m_LocalRotation: {x: 0.5, y: 0.5, z: 0.5, w: 0.5} - m_LocalPosition: {x: 0, y: 0.85, z: 0} - m_LocalScale: {x: 0.38461524, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 4895871576690222} - m_RootOrder: 0 
- m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4345259575467704 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1374302196839222} - m_LocalRotation: {x: -0.7071068, y: -0.000000044703484, z: -0.000000044703484, w: 0.7071068} - m_LocalPosition: {x: -0, y: 0, z: -1.98} - m_LocalScale: {x: 0.30000007, y: 0.780001, z: 0.30000085} - m_Children: - - {fileID: 4900039245054002} - m_Father: {fileID: 4283901937423508} - m_RootOrder: 4 - m_LocalEulerAnglesHint: {x: -90, y: 0, z: 0} ---- !u!4 &4415045083540138 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1828705975967124} - m_LocalRotation: {x: -0, y: 0.38268337, z: -0, w: 0.9238796} - m_LocalPosition: {x: 0, y: 0.1500001, z: 0} - m_LocalScale: {x: 1.0500007, y: 1.0500001, z: 1.0500007} - m_Children: - - {fileID: 4057600146278330} - - {fileID: 4690632006859362} - - {fileID: 4829345825963888} - - {fileID: 4785798153510720} - m_Father: {fileID: 4283901937423508} - m_RootOrder: 8 - m_LocalEulerAnglesHint: {x: 0, y: 45, z: 0} ---- !u!4 &4554064857129596 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1441235561015404} - m_LocalRotation: {x: -0.5, y: 0.5, z: 0.5, w: 0.5} - m_LocalPosition: {x: -1.98, y: 0, z: 0} - m_LocalScale: {x: 0.30000025, y: 0.78000027, z: 0.30000013} - m_Children: - - {fileID: 4259242127147388} - m_Father: {fileID: 4283901937423508} - m_RootOrder: 5 - m_LocalEulerAnglesHint: {x: -90, y: 90, z: 0} ---- !u!4 &4586389951765900 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1942253916102162} - m_LocalRotation: {x: -0.000000044703484, y: 0.7071068, z: 0.7071068, w: 0.000000044703484} - m_LocalPosition: {x: 0, y: 0, z: 0.9000001} - m_LocalScale: {x: 
0.3000013, y: 0.48000073, z: 0.30000085} - m_Children: [] - m_Father: {fileID: 4283901937423508} - m_RootOrder: 2 - m_LocalEulerAnglesHint: {x: -90, y: 180, z: 0} ---- !u!4 &4588771957217034 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1572738585910590} - m_LocalRotation: {x: -0.000000044703484, y: 0.7071068, z: 0.7071068, w: 0.000000044703484} - m_LocalPosition: {x: 0, y: 0, z: 1.9800004} - m_LocalScale: {x: 0.3000016, y: 0.78000104, z: 0.30000085} - m_Children: - - {fileID: 4011799289864642} - m_Father: {fileID: 4283901937423508} - m_RootOrder: 6 - m_LocalEulerAnglesHint: {x: -90, y: 180, z: 0} ---- !u!4 &4690632006859362 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1515584191522134} - m_LocalRotation: {x: -0.22353469, y: 0.0000001490116, z: 0.00000004470348, w: 0.97469604} - m_LocalPosition: {x: 0.198, y: 0.125, z: 0.437} - m_LocalScale: {x: 0.1285858, y: 0.12858574, z: 0.07360156} - m_Children: [] - m_Father: {fileID: 4415045083540138} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: -25.833002, y: 0, z: 0} ---- !u!4 &4785798153510720 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1726653501711894} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0.195, z: 0} - m_LocalScale: {x: 0.88591725, y: 0.19942614, z: 0.88591725} - m_Children: [] - m_Father: {fileID: 4415045083540138} - m_RootOrder: 3 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4829345825963888 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1919512060766040} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: -0.028, z: 0.484} - m_LocalScale: {x: 0.11330591, y: 0.0262317, z: 0.030423056} - 
m_Children: [] - m_Father: {fileID: 4415045083540138} - m_RootOrder: 2 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4895871576690222 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1187688929114980} - m_LocalRotation: {x: -0.5, y: -0.5, z: -0.5, w: 0.5} - m_LocalPosition: {x: 1.98, y: 0, z: 0} - m_LocalScale: {x: 0.3, y: 0.7800003, z: 0.30000013} - m_Children: - - {fileID: 4340731500216736} - m_Father: {fileID: 4283901937423508} - m_RootOrder: 7 - m_LocalEulerAnglesHint: {x: -90, y: -90, z: 0} ---- !u!4 &4900039245054002 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1932904558829096} - m_LocalRotation: {x: 0.7071068, y: 0.000000044703484, z: 0.000000044703484, w: 0.7071068} - m_LocalPosition: {x: -0, y: 0.85, z: 0} - m_LocalScale: {x: 1, y: 1, z: 0.38461503} - m_Children: [] - m_Father: {fileID: 4345259575467704} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!23 &23032058032275540 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1441235561015404} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: c9fa44c2c3f8ce74ca39a3355ea42631, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- 
!u!23 &23057739860601414 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1114398910245350} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: eaad04b0e0dec42229c9cb00a981d7ac, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23077321719373320 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1272060045703202} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: eaad04b0e0dec42229c9cb00a981d7ac, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23151895309585212 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 
1374302196839222} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: c9fa44c2c3f8ce74ca39a3355ea42631, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23214116999615168 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1942253916102162} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: eaad04b0e0dec42229c9cb00a981d7ac, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23237912265456852 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1405459036737686} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 
1 - m_Materials: - - {fileID: 2100000, guid: eaad04b0e0dec42229c9cb00a981d7ac, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23271479376984230 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1515584191522134} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 69fefdd39d2b34b169e921910bed9c0d, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23350468638451092 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1528865102058454} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 69fefdd39d2b34b169e921910bed9c0d, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: 
{fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23407712350003930 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1298268328268866} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: eaad04b0e0dec42229c9cb00a981d7ac, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23408614112088478 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1828705975967124} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: c9fa44c2c3f8ce74ca39a3355ea42631, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - 
m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23458909467204720 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1449857773419816} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: eaad04b0e0dec42229c9cb00a981d7ac, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23573846879420774 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1187688929114980} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: c9fa44c2c3f8ce74ca39a3355ea42631, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - 
m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23636497540812122 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1932904558829096} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: eaad04b0e0dec42229c9cb00a981d7ac, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23682645520376206 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1919512060766040} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 69fefdd39d2b34b169e921910bed9c0d, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23682678482966492 -MeshRenderer: - m_ObjectHideFlags: 1 - 
m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1572738585910590} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: c9fa44c2c3f8ce74ca39a3355ea42631, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23857179722177926 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1726653501711894} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: a650e0d6d57f74e708dcef9886196037, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23868441579563002 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1396782062923916} - m_Enabled: 1 - m_CastShadows: 1 - 
m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: eaad04b0e0dec42229c9cb00a981d7ac, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!33 &33046987671380990 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1441235561015404} - m_Mesh: {fileID: 10208, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33136431790211548 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1396782062923916} - m_Mesh: {fileID: 10207, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33237114310346146 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1572738585910590} - m_Mesh: {fileID: 10208, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33247528189479878 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1298268328268866} - m_Mesh: {fileID: 10208, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33332933015356954 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1528865102058454} - m_Mesh: {fileID: 10207, guid: 0000000000000000e000000000000000, type: 0} ---- 
!u!33 &33421385354278630 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1942253916102162} - m_Mesh: {fileID: 10208, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33421992822520258 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1405459036737686} - m_Mesh: {fileID: 10207, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33505539604743060 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1828705975967124} - m_Mesh: {fileID: 10207, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33554492852954920 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1187688929114980} - m_Mesh: {fileID: 10208, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33580637687345318 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1919512060766040} - m_Mesh: {fileID: 10207, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33600319474102400 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1374302196839222} - m_Mesh: {fileID: 10208, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33648152897957930 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1272060045703202} - m_Mesh: {fileID: 10208, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33763557645324940 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1726653501711894} 
- m_Mesh: {fileID: 10208, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33811741900901180 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1932904558829096} - m_Mesh: {fileID: 10207, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33920999609521594 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1515584191522134} - m_Mesh: {fileID: 10207, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33942247511407576 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1114398910245350} - m_Mesh: {fileID: 10208, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33947674706028178 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1449857773419816} - m_Mesh: {fileID: 10207, guid: 0000000000000000e000000000000000, type: 0} ---- !u!54 &54016370531151242 -Rigidbody: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1441235561015404} - serializedVersion: 2 - m_Mass: 1 - m_Drag: 0 - m_AngularDrag: 0.05 - m_UseGravity: 1 - m_IsKinematic: 0 - m_Interpolate: 0 - m_Constraints: 0 - m_CollisionDetection: 0 ---- !u!54 &54016577210578658 -Rigidbody: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1114398910245350} - serializedVersion: 2 - m_Mass: 3 - m_Drag: 0 - m_AngularDrag: 0.05 - m_UseGravity: 1 - m_IsKinematic: 0 - m_Interpolate: 0 - m_Constraints: 0 - m_CollisionDetection: 0 ---- !u!54 &54132359757210108 -Rigidbody: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 
1272060045703202} - serializedVersion: 2 - m_Mass: 3 - m_Drag: 0 - m_AngularDrag: 0.05 - m_UseGravity: 1 - m_IsKinematic: 0 - m_Interpolate: 0 - m_Constraints: 0 - m_CollisionDetection: 0 ---- !u!54 &54218381078267390 -Rigidbody: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1374302196839222} - serializedVersion: 2 - m_Mass: 1 - m_Drag: 0 - m_AngularDrag: 0.05 - m_UseGravity: 1 - m_IsKinematic: 0 - m_Interpolate: 0 - m_Constraints: 0 - m_CollisionDetection: 0 ---- !u!54 &54279887023973758 -Rigidbody: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1298268328268866} - serializedVersion: 2 - m_Mass: 3 - m_Drag: 0 - m_AngularDrag: 0.05 - m_UseGravity: 1 - m_IsKinematic: 0 - m_Interpolate: 0 - m_Constraints: 0 - m_CollisionDetection: 0 ---- !u!54 &54341884066505438 -Rigidbody: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1572738585910590} - serializedVersion: 2 - m_Mass: 1 - m_Drag: 0 - m_AngularDrag: 0.05 - m_UseGravity: 1 - m_IsKinematic: 0 - m_Interpolate: 0 - m_Constraints: 0 - m_CollisionDetection: 0 ---- !u!54 &54350686216978830 -Rigidbody: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1942253916102162} - serializedVersion: 2 - m_Mass: 3 - m_Drag: 0 - m_AngularDrag: 0.05 - m_UseGravity: 1 - m_IsKinematic: 0 - m_Interpolate: 0 - m_Constraints: 0 - m_CollisionDetection: 0 ---- !u!54 &54419147063662066 -Rigidbody: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1828705975967124} - serializedVersion: 2 - m_Mass: 1 - m_Drag: 0 - m_AngularDrag: 0.05 - m_UseGravity: 1 - m_IsKinematic: 0 - m_Interpolate: 0 - m_Constraints: 0 - m_CollisionDetection: 0 ---- !u!54 &54496921079023298 -Rigidbody: 
- m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1187688929114980} - serializedVersion: 2 - m_Mass: 1 - m_Drag: 0 - m_AngularDrag: 0.05 - m_UseGravity: 1 - m_IsKinematic: 0 - m_Interpolate: 0 - m_Constraints: 0 - m_CollisionDetection: 0 ---- !u!114 &114089784393623120 -MonoBehaviour: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1187688929114980} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 2666e257ea992476cae0f7f163165e71, type: 3} - m_Name: - m_EditorClassIdentifier: - agent: {fileID: 0} - agentDoneOnGroundContact: 0 - penalizeGroundContact: 0 - groundContactPenalty: 0 - touchingGround: 0 ---- !u!114 &114118084730610894 -MonoBehaviour: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1572738585910590} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 2666e257ea992476cae0f7f163165e71, type: 3} - m_Name: - m_EditorClassIdentifier: - agent: {fileID: 0} - agentDoneOnGroundContact: 0 - penalizeGroundContact: 0 - groundContactPenalty: 0 - touchingGround: 0 ---- !u!114 &114201511158296780 -MonoBehaviour: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1272060045703202} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 2666e257ea992476cae0f7f163165e71, type: 3} - m_Name: - m_EditorClassIdentifier: - agent: {fileID: 0} - agentDoneOnGroundContact: 1 - penalizeGroundContact: 1 - groundContactPenalty: -1 - touchingGround: 0 ---- !u!114 &114245671031386664 -MonoBehaviour: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1374302196839222} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 
2666e257ea992476cae0f7f163165e71, type: 3} - m_Name: - m_EditorClassIdentifier: - agent: {fileID: 0} - agentDoneOnGroundContact: 0 - penalizeGroundContact: 0 - groundContactPenalty: 0 - touchingGround: 0 ---- !u!114 &114366290264684242 -MonoBehaviour: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1129387478859796} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 1b29724baddfa457da6eeab446fa49ca, type: 3} - m_Name: - m_EditorClassIdentifier: - maxJointSpring: 40000 - jointDampen: 3000 - maxJointForceLimit: 10000 - bodyPartsList: [] ---- !u!114 &114418822382166060 -MonoBehaviour: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1828705975967124} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 2666e257ea992476cae0f7f163165e71, type: 3} - m_Name: - m_EditorClassIdentifier: - agent: {fileID: 0} - agentDoneOnGroundContact: 1 - penalizeGroundContact: 1 - groundContactPenalty: -1 - touchingGround: 0 ---- !u!114 &114701352747735826 -MonoBehaviour: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1441235561015404} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 2666e257ea992476cae0f7f163165e71, type: 3} - m_Name: - m_EditorClassIdentifier: - agent: {fileID: 0} - agentDoneOnGroundContact: 0 - penalizeGroundContact: 0 - groundContactPenalty: 0 - touchingGround: 0 ---- !u!114 &114744867262310804 -MonoBehaviour: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1298268328268866} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 2666e257ea992476cae0f7f163165e71, type: 3} - m_Name: - m_EditorClassIdentifier: - agent: {fileID: 0} - agentDoneOnGroundContact: 1 - 
penalizeGroundContact: 1 - groundContactPenalty: -1 - touchingGround: 0 ---- !u!114 &114754976708400114 -MonoBehaviour: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1942253916102162} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 2666e257ea992476cae0f7f163165e71, type: 3} - m_Name: - m_EditorClassIdentifier: - agent: {fileID: 0} - agentDoneOnGroundContact: 1 - penalizeGroundContact: 1 - groundContactPenalty: -1 - touchingGround: 0 ---- !u!114 &114805933462601644 -MonoBehaviour: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1114398910245350} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 2666e257ea992476cae0f7f163165e71, type: 3} - m_Name: - m_EditorClassIdentifier: - agent: {fileID: 0} - agentDoneOnGroundContact: 1 - penalizeGroundContact: 1 - groundContactPenalty: -1 - touchingGround: 0 ---- !u!114 &114824265617332224 -MonoBehaviour: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1129387478859796} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 2f37c30a5e8d04117947188818902ef3, type: 3} - m_Name: - m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: 0e3b44d36c7a047c4addb92457b12be5, type: 2} - agentParameters: - agentCameras: [] - agentRenderTextures: [] - maxStep: 5000 - resetOnDone: 1 - onDemandDecision: 0 - numberOfActionsBetweenDecisions: 5 - target: {fileID: 0} - ground: {fileID: 0} - detectTargets: 1 - respawnTargetWhenTouched: 1 - targetSpawnRadius: 50 - body: {fileID: 4415045083540138} - leg0Upper: {fileID: 4315486672294942} - leg0Lower: {fileID: 4345259575467704} - leg1Upper: {fileID: 4320635105751664} - leg1Lower: {fileID: 4554064857129596} - leg2Upper: {fileID: 4586389951765900} - leg2Lower: {fileID: 4588771957217034} - leg3Upper: 
{fileID: 4042226991863502} - leg3Lower: {fileID: 4895871576690222} - rewardMovingTowardsTarget: 1 - rewardFacingTarget: 1 - rewardUseTimePenalty: 0 - useFootGroundedVisualization: 0 - foot0: {fileID: 23636497540812122} - foot1: {fileID: 23868441579563002} - foot2: {fileID: 23237912265456852} - foot3: {fileID: 23458909467204720} - groundedMaterial: {fileID: 0} - unGroundedMaterial: {fileID: 0} ---- !u!135 &135940545379391998 -SphereCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1828705975967124} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Radius: 0.3 - m_Center: {x: 0, y: 0, z: 0} ---- !u!136 &136060786617938986 -CapsuleCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1942253916102162} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - m_Radius: 0.5 - m_Height: 1.7 - m_Direction: 1 - m_Center: {x: 0, y: 0, z: 0} ---- !u!136 &136131153676953572 -CapsuleCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1272060045703202} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - m_Radius: 0.5 - m_Height: 1.7 - m_Direction: 1 - m_Center: {x: 0, y: 0, z: 0} ---- !u!136 &136143404163221586 -CapsuleCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1572738585910590} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - m_Radius: 0.5 - m_Height: 1.7 - m_Direction: 1 - m_Center: {x: 0, y: 0.12, z: 0} ---- !u!136 &136460960731582558 -CapsuleCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1187688929114980} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - m_Radius: 0.5 - m_Height: 1.7 - 
m_Direction: 1 - m_Center: {x: 0, y: 0.12, z: 0} ---- !u!136 &136669811194203340 -CapsuleCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1441235561015404} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - m_Radius: 0.5 - m_Height: 1.7 - m_Direction: 1 - m_Center: {x: 0, y: 0.12, z: 0} ---- !u!136 &136674299004804612 -CapsuleCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1298268328268866} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - m_Radius: 0.5 - m_Height: 1.7 - m_Direction: 1 - m_Center: {x: 0, y: 0, z: 0} ---- !u!136 &136690235415233670 -CapsuleCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1374302196839222} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - m_Radius: 0.5 - m_Height: 1.7 - m_Direction: 1 - m_Center: {x: 0, y: 0.12, z: 0} ---- !u!136 &136693614685681770 -CapsuleCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1726653501711894} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 0 - m_Radius: 0.5 - m_Height: 2 - m_Direction: 1 - m_Center: {x: 0, y: 0, z: 0} ---- !u!136 &136766532690346814 -CapsuleCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1114398910245350} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - m_Radius: 0.5 - m_Height: 1.7 - m_Direction: 1 - m_Center: {x: 0, y: 0, z: 0} ---- !u!153 &153208165644155028 -ConfigurableJoint: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1187688929114980} - m_ConnectedBody: {fileID: 54279887023973758} - m_Anchor: {x: 0, y: -1, z: 0} - m_Axis: {x: 1, y: 0, 
z: 0} - m_AutoConfigureConnectedAnchor: 1 - m_ConnectedAnchor: {x: -0.000000049670533, y: 0.624999, z: 0} - serializedVersion: 2 - m_SecondaryAxis: {x: 0, y: 0, z: -1} - m_XMotion: 0 - m_YMotion: 0 - m_ZMotion: 0 - m_AngularXMotion: 1 - m_AngularYMotion: 0 - m_AngularZMotion: 0 - m_LinearLimitSpring: - spring: 0 - damper: 0 - m_LinearLimit: - limit: 0 - bounciness: 0 - contactDistance: 0 - m_AngularXLimitSpring: - spring: 0 - damper: 0 - m_LowAngularXLimit: - limit: 0 - bounciness: 0 - contactDistance: 0 - m_HighAngularXLimit: - limit: 150 - bounciness: 0 - contactDistance: 0 - m_AngularYZLimitSpring: - spring: 0 - damper: 0 - m_AngularYLimit: - limit: 0 - bounciness: 0 - contactDistance: 0 - m_AngularZLimit: - limit: 0 - bounciness: 0 - contactDistance: 0 - m_TargetPosition: {x: 0, y: 0, z: 0} - m_TargetVelocity: {x: 0, y: 0, z: 0} - m_XDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_YDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_ZDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_TargetRotation: {x: 0, y: 0, z: 0, w: 1} - m_TargetAngularVelocity: {x: 0, y: 0, z: 0} - m_RotationDriveMode: 1 - m_AngularXDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_AngularYZDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_SlerpDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_ProjectionMode: 1 - m_ProjectionDistance: 0.1 - m_ProjectionAngle: 180 - m_ConfiguredInWorldSpace: 0 - m_SwapBodies: 0 - m_BreakForce: Infinity - m_BreakTorque: Infinity - m_EnableCollision: 0 - m_EnablePreprocessing: 0 - m_MassScale: 1 - m_ConnectedMassScale: 1 ---- !u!153 &153258788415880078 -ConfigurableJoint: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - 
m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1942253916102162} - m_ConnectedBody: {fileID: 54419147063662066} - m_Anchor: {x: 0, y: -1, z: 0} - m_Axis: {x: 1, y: 0, z: 0} - m_AutoConfigureConnectedAnchor: 1 - m_ConnectedAnchor: {x: -0.28284186, y: -0.14285721, z: 0.2828422} - serializedVersion: 2 - m_SecondaryAxis: {x: 0, y: 0, z: -1} - m_XMotion: 0 - m_YMotion: 0 - m_ZMotion: 0 - m_AngularXMotion: 1 - m_AngularYMotion: 1 - m_AngularZMotion: 0 - m_LinearLimitSpring: - spring: 0 - damper: 0 - m_LinearLimit: - limit: 0 - bounciness: 0 - contactDistance: 0 - m_AngularXLimitSpring: - spring: 0 - damper: 0 - m_LowAngularXLimit: - limit: -60 - bounciness: 0 - contactDistance: 0 - m_HighAngularXLimit: - limit: 0 - bounciness: 0 - contactDistance: 0 - m_AngularYZLimitSpring: - spring: 0 - damper: 0 - m_AngularYLimit: - limit: 20 - bounciness: 0 - contactDistance: 0 - m_AngularZLimit: - limit: 0 - bounciness: 0 - contactDistance: 0 - m_TargetPosition: {x: 0, y: 0, z: 0} - m_TargetVelocity: {x: 0, y: 0, z: 0} - m_XDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_YDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_ZDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_TargetRotation: {x: 0, y: 0, z: 0, w: 1} - m_TargetAngularVelocity: {x: 0, y: 0, z: 0} - m_RotationDriveMode: 1 - m_AngularXDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_AngularYZDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_SlerpDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_ProjectionMode: 1 - m_ProjectionDistance: 0.1 - m_ProjectionAngle: 180 - m_ConfiguredInWorldSpace: 0 - m_SwapBodies: 0 - m_BreakForce: Infinity - m_BreakTorque: Infinity - m_EnableCollision: 0 - 
m_EnablePreprocessing: 0 - m_MassScale: 1 - m_ConnectedMassScale: 1 ---- !u!153 &153407740652563728 -ConfigurableJoint: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1114398910245350} - m_ConnectedBody: {fileID: 54419147063662066} - m_Anchor: {x: 0, y: -1, z: 0} - m_Axis: {x: 1, y: 0, z: 0} - m_AutoConfigureConnectedAnchor: 1 - m_ConnectedAnchor: {x: 0.28284186, y: -0.14285721, z: -0.2828422} - serializedVersion: 2 - m_SecondaryAxis: {x: 0, y: 0, z: -1} - m_XMotion: 0 - m_YMotion: 0 - m_ZMotion: 0 - m_AngularXMotion: 1 - m_AngularYMotion: 1 - m_AngularZMotion: 0 - m_LinearLimitSpring: - spring: 0 - damper: 0 - m_LinearLimit: - limit: 0 - bounciness: 0 - contactDistance: 0 - m_AngularXLimitSpring: - spring: 0 - damper: 0 - m_LowAngularXLimit: - limit: -60 - bounciness: 0 - contactDistance: 0 - m_HighAngularXLimit: - limit: 0 - bounciness: 0 - contactDistance: 0 - m_AngularYZLimitSpring: - spring: 0 - damper: 0 - m_AngularYLimit: - limit: 20 - bounciness: 0 - contactDistance: 0 - m_AngularZLimit: - limit: 0 - bounciness: 0 - contactDistance: 0 - m_TargetPosition: {x: 0, y: 0, z: 0} - m_TargetVelocity: {x: 0, y: 0, z: 0} - m_XDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_YDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_ZDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_TargetRotation: {x: 0, y: 0, z: 0, w: 1} - m_TargetAngularVelocity: {x: 0, y: 0, z: 0} - m_RotationDriveMode: 1 - m_AngularXDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_AngularYZDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_SlerpDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_ProjectionMode: 
1 - m_ProjectionDistance: 0.1 - m_ProjectionAngle: 180 - m_ConfiguredInWorldSpace: 0 - m_SwapBodies: 0 - m_BreakForce: Infinity - m_BreakTorque: Infinity - m_EnableCollision: 0 - m_EnablePreprocessing: 0 - m_MassScale: 1 - m_ConnectedMassScale: 1 ---- !u!153 &153475598826299386 -ConfigurableJoint: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1572738585910590} - m_ConnectedBody: {fileID: 54350686216978830} - m_Anchor: {x: 0, y: -1, z: 0} - m_Axis: {x: 1, y: 0, z: 0} - m_AutoConfigureConnectedAnchor: 1 - m_ConnectedAnchor: {x: -0.00000072248366, y: 0.62499726, z: -0.000000099340795} - serializedVersion: 2 - m_SecondaryAxis: {x: 0, y: 0, z: -1} - m_XMotion: 0 - m_YMotion: 0 - m_ZMotion: 0 - m_AngularXMotion: 1 - m_AngularYMotion: 0 - m_AngularZMotion: 0 - m_LinearLimitSpring: - spring: 0 - damper: 0 - m_LinearLimit: - limit: 0 - bounciness: 0 - contactDistance: 0 - m_AngularXLimitSpring: - spring: 0 - damper: 0 - m_LowAngularXLimit: - limit: 0 - bounciness: 0 - contactDistance: 0 - m_HighAngularXLimit: - limit: 150 - bounciness: 0 - contactDistance: 0 - m_AngularYZLimitSpring: - spring: 0 - damper: 0 - m_AngularYLimit: - limit: 0 - bounciness: 0 - contactDistance: 0 - m_AngularZLimit: - limit: 0 - bounciness: 0 - contactDistance: 0 - m_TargetPosition: {x: 0, y: 0, z: 0} - m_TargetVelocity: {x: 0, y: 0, z: 0} - m_XDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_YDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_ZDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_TargetRotation: {x: 0, y: 0, z: 0, w: 1} - m_TargetAngularVelocity: {x: 0, y: 0, z: 0} - m_RotationDriveMode: 1 - m_AngularXDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_AngularYZDrive: - serializedVersion: 3 - 
positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_SlerpDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_ProjectionMode: 1 - m_ProjectionDistance: 0.1 - m_ProjectionAngle: 180 - m_ConfiguredInWorldSpace: 0 - m_SwapBodies: 0 - m_BreakForce: Infinity - m_BreakTorque: Infinity - m_EnableCollision: 0 - m_EnablePreprocessing: 0 - m_MassScale: 1 - m_ConnectedMassScale: 1 ---- !u!153 &153558694387803708 -ConfigurableJoint: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1441235561015404} - m_ConnectedBody: {fileID: 54132359757210108} - m_Anchor: {x: 0, y: -1, z: 0} - m_Axis: {x: 1, y: 0, z: 0} - m_AutoConfigureConnectedAnchor: 1 - m_ConnectedAnchor: {x: 0.00000014901148, y: 0.6249993, z: 0} - serializedVersion: 2 - m_SecondaryAxis: {x: 0, y: 0, z: -1} - m_XMotion: 0 - m_YMotion: 0 - m_ZMotion: 0 - m_AngularXMotion: 1 - m_AngularYMotion: 0 - m_AngularZMotion: 0 - m_LinearLimitSpring: - spring: 0 - damper: 0 - m_LinearLimit: - limit: 0 - bounciness: 0 - contactDistance: 0 - m_AngularXLimitSpring: - spring: 0 - damper: 0 - m_LowAngularXLimit: - limit: 0 - bounciness: 0 - contactDistance: 0 - m_HighAngularXLimit: - limit: 150 - bounciness: 0 - contactDistance: 0 - m_AngularYZLimitSpring: - spring: 0 - damper: 0 - m_AngularYLimit: - limit: 0 - bounciness: 0 - contactDistance: 0 - m_AngularZLimit: - limit: 0 - bounciness: 0 - contactDistance: 0 - m_TargetPosition: {x: 0, y: 0, z: 0} - m_TargetVelocity: {x: 0, y: 0, z: 0} - m_XDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_YDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_ZDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_TargetRotation: {x: 0, y: 0, z: 0, w: 1} - m_TargetAngularVelocity: {x: 0, y: 0, z: 0} - 
m_RotationDriveMode: 1 - m_AngularXDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_AngularYZDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_SlerpDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_ProjectionMode: 1 - m_ProjectionDistance: 0.1 - m_ProjectionAngle: 180 - m_ConfiguredInWorldSpace: 0 - m_SwapBodies: 0 - m_BreakForce: Infinity - m_BreakTorque: Infinity - m_EnableCollision: 0 - m_EnablePreprocessing: 0 - m_MassScale: 1 - m_ConnectedMassScale: 1 ---- !u!153 &153765018551042116 -ConfigurableJoint: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1374302196839222} - m_ConnectedBody: {fileID: 54016577210578658} - m_Anchor: {x: 0, y: -1, z: 0} - m_Axis: {x: 1, y: 0, z: 0} - m_AutoConfigureConnectedAnchor: 1 - m_ConnectedAnchor: {x: -0.0000013185324, y: 0.624996, z: 0.000000099340845} - serializedVersion: 2 - m_SecondaryAxis: {x: 0, y: 0, z: -1} - m_XMotion: 0 - m_YMotion: 0 - m_ZMotion: 0 - m_AngularXMotion: 1 - m_AngularYMotion: 0 - m_AngularZMotion: 0 - m_LinearLimitSpring: - spring: 0 - damper: 0 - m_LinearLimit: - limit: 0 - bounciness: 0 - contactDistance: 0 - m_AngularXLimitSpring: - spring: 0 - damper: 0 - m_LowAngularXLimit: - limit: 0 - bounciness: 0 - contactDistance: 0 - m_HighAngularXLimit: - limit: 150 - bounciness: 0 - contactDistance: 0 - m_AngularYZLimitSpring: - spring: 0 - damper: 0 - m_AngularYLimit: - limit: 0 - bounciness: 0 - contactDistance: 0 - m_AngularZLimit: - limit: 0 - bounciness: 0 - contactDistance: 0 - m_TargetPosition: {x: 0, y: 0, z: 0} - m_TargetVelocity: {x: 0, y: 0, z: 0} - m_XDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_YDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - 
m_ZDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_TargetRotation: {x: 0, y: 0, z: 0, w: 1} - m_TargetAngularVelocity: {x: 0, y: 0, z: 0} - m_RotationDriveMode: 1 - m_AngularXDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_AngularYZDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_SlerpDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_ProjectionMode: 1 - m_ProjectionDistance: 0.1 - m_ProjectionAngle: 180 - m_ConfiguredInWorldSpace: 0 - m_SwapBodies: 0 - m_BreakForce: Infinity - m_BreakTorque: Infinity - m_EnableCollision: 0 - m_EnablePreprocessing: 0 - m_MassScale: 1 - m_ConnectedMassScale: 1 ---- !u!153 &153842898101386942 -ConfigurableJoint: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1298268328268866} - m_ConnectedBody: {fileID: 54419147063662066} - m_Anchor: {x: 0, y: -1, z: 0} - m_Axis: {x: 1, y: 0, z: 0} - m_AutoConfigureConnectedAnchor: 1 - m_ConnectedAnchor: {x: 0.28284237, y: -0.14285721, z: 0.28284237} - serializedVersion: 2 - m_SecondaryAxis: {x: 0, y: 0, z: -1} - m_XMotion: 0 - m_YMotion: 0 - m_ZMotion: 0 - m_AngularXMotion: 1 - m_AngularYMotion: 1 - m_AngularZMotion: 0 - m_LinearLimitSpring: - spring: 0 - damper: 0 - m_LinearLimit: - limit: 0 - bounciness: 0 - contactDistance: 0 - m_AngularXLimitSpring: - spring: 0 - damper: 0 - m_LowAngularXLimit: - limit: -60 - bounciness: 0 - contactDistance: 0 - m_HighAngularXLimit: - limit: 0 - bounciness: 0 - contactDistance: 0 - m_AngularYZLimitSpring: - spring: 0 - damper: 0 - m_AngularYLimit: - limit: 20 - bounciness: 0 - contactDistance: 0 - m_AngularZLimit: - limit: 0 - bounciness: 0 - contactDistance: 0 - m_TargetPosition: {x: 0, y: 0, z: 0} - m_TargetVelocity: {x: 0, y: 0, z: 0} - m_XDrive: - serializedVersion: 
3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_YDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_ZDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_TargetRotation: {x: 0, y: 0, z: 0, w: 1} - m_TargetAngularVelocity: {x: 0, y: 0, z: 0} - m_RotationDriveMode: 1 - m_AngularXDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_AngularYZDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_SlerpDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_ProjectionMode: 1 - m_ProjectionDistance: 0.1 - m_ProjectionAngle: 180 - m_ConfiguredInWorldSpace: 0 - m_SwapBodies: 0 - m_BreakForce: Infinity - m_BreakTorque: Infinity - m_EnableCollision: 0 - m_EnablePreprocessing: 0 - m_MassScale: 1 - m_ConnectedMassScale: 1 ---- !u!153 &153987082673184770 -ConfigurableJoint: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1272060045703202} - m_ConnectedBody: {fileID: 54419147063662066} - m_Anchor: {x: 0, y: -1, z: 0} - m_Axis: {x: 1, y: 0, z: 0} - m_AutoConfigureConnectedAnchor: 1 - m_ConnectedAnchor: {x: -0.28284237, y: -0.14285721, z: -0.28284237} - serializedVersion: 2 - m_SecondaryAxis: {x: 0, y: 0, z: -1} - m_XMotion: 0 - m_YMotion: 0 - m_ZMotion: 0 - m_AngularXMotion: 1 - m_AngularYMotion: 1 - m_AngularZMotion: 0 - m_LinearLimitSpring: - spring: 0 - damper: 0 - m_LinearLimit: - limit: 0 - bounciness: 0 - contactDistance: 0 - m_AngularXLimitSpring: - spring: 0 - damper: 0 - m_LowAngularXLimit: - limit: -60 - bounciness: 0 - contactDistance: 0 - m_HighAngularXLimit: - limit: 0 - bounciness: 0 - contactDistance: 0 - m_AngularYZLimitSpring: - spring: 0 - damper: 0 - m_AngularYLimit: - limit: 20 - bounciness: 0 - contactDistance: 
0 - m_AngularZLimit: - limit: 0 - bounciness: 0 - contactDistance: 0 - m_TargetPosition: {x: 0, y: 0, z: 0} - m_TargetVelocity: {x: 0, y: 0, z: 0} - m_XDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_YDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_ZDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_TargetRotation: {x: 0, y: 0, z: 0, w: 1} - m_TargetAngularVelocity: {x: 0, y: 0, z: 0} - m_RotationDriveMode: 1 - m_AngularXDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_AngularYZDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_SlerpDrive: - serializedVersion: 3 - positionSpring: 0 - positionDamper: 0 - maximumForce: 3.4028233e+38 - m_ProjectionMode: 1 - m_ProjectionDistance: 0.1 - m_ProjectionAngle: 180 - m_ConfiguredInWorldSpace: 0 - m_SwapBodies: 0 - m_BreakForce: Infinity - m_BreakTorque: Infinity - m_EnableCollision: 0 - m_EnablePreprocessing: 0 - m_MassScale: 1 - m_ConnectedMassScale: 1 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Crawler/Prefabs/DynamicPlatform.prefab b/UnitySDK/Assets/ML-Agents/Examples/Crawler/Prefabs/DynamicPlatform.prefab index 4f6bc4063e..7f34ab30e9 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/Crawler/Prefabs/DynamicPlatform.prefab +++ b/UnitySDK/Assets/ML-Agents/Examples/Crawler/Prefabs/DynamicPlatform.prefab @@ -293,6 +293,7 @@ GameObject: serializedVersion: 5 m_Component: - component: {fileID: 4313455366547514} + - component: {fileID: 114060650647145362} - component: {fileID: 114590693924030052} - component: {fileID: 114423363226357902} m_Layer: 0 @@ -2086,6 +2087,28 @@ Light: m_UseColorTemperature: 0 m_ShadowRadius: 0 m_ShadowAngle: 0 +--- !u!114 &114060650647145362 +MonoBehaviour: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: 
{fileID: 100100000} + m_GameObject: {fileID: 1515093357607024} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} + m_Name: + m_EditorClassIdentifier: + m_BrainParameters: + vectorObservationSize: 126 + numStackedVectorObservations: 1 + vectorActionSize: 14000000 + cameraResolutions: [] + vectorActionDescriptions: [] + vectorActionSpaceType: 1 + m_Model: {fileID: 11400000, guid: abc9c8f2180154ed7ba3f116ab0beb90, type: 3} + m_InferenceDevice: 0 + m_UseHeuristic: 0 + m_BehaviorName: CrawlerDynamic --- !u!114 &114157055237627828 MonoBehaviour: m_ObjectHideFlags: 1 @@ -2192,7 +2215,6 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 2f37c30a5e8d04117947188818902ef3, type: 3} m_Name: m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: 0e3b44d36c7a047c4addb92457b12be5, type: 2} agentParameters: agentCameras: [] agentRenderTextures: [] @@ -2203,6 +2225,7 @@ MonoBehaviour: target: {fileID: 4490950947783742} ground: {fileID: 4684408634944056} detectTargets: 1 + targetIsStatic: 0 respawnTargetWhenTouched: 1 targetSpawnRadius: 40 body: {fileID: 4331762859142564} diff --git a/UnitySDK/Assets/ML-Agents/Examples/Crawler/Prefabs/FixedPlatform.prefab b/UnitySDK/Assets/ML-Agents/Examples/Crawler/Prefabs/FixedPlatform.prefab index a6879df7c9..0f97e6d3a8 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/Crawler/Prefabs/FixedPlatform.prefab +++ b/UnitySDK/Assets/ML-Agents/Examples/Crawler/Prefabs/FixedPlatform.prefab @@ -259,6 +259,7 @@ GameObject: serializedVersion: 5 m_Component: - component: {fileID: 4743084330461368} + - component: {fileID: 114727679958902886} - component: {fileID: 114230237520033992} - component: {fileID: 114375802757824636} m_Layer: 0 @@ -1810,7 +1811,6 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 2f37c30a5e8d04117947188818902ef3, type: 3} m_Name: m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: 0505e961608004377974940ed17e03d5, type: 2} agentParameters: agentCameras: [] 
agentRenderTextures: [] @@ -1938,6 +1938,28 @@ MonoBehaviour: penalizeGroundContact: 0 groundContactPenalty: 0 touchingGround: 0 +--- !u!114 &114727679958902886 +MonoBehaviour: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1492298671135358} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} + m_Name: + m_EditorClassIdentifier: + m_BrainParameters: + vectorObservationSize: 126 + numStackedVectorObservations: 1 + vectorActionSize: 14000000 + cameraResolutions: [] + vectorActionDescriptions: [] + vectorActionSpaceType: 1 + m_Model: {fileID: 11400000, guid: 48982d8fa360a4ed0bb265495e4f378b, type: 3} + m_InferenceDevice: 0 + m_UseHeuristic: 0 + m_BehaviorName: CrawlerStatic --- !u!114 &114954029223843696 MonoBehaviour: m_ObjectHideFlags: 1 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Crawler/Scenes/CrawlerDynamicTarget.unity b/UnitySDK/Assets/ML-Agents/Examples/Crawler/Scenes/CrawlerDynamicTarget.unity index 182d2be923..651e827aa3 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/Crawler/Scenes/CrawlerDynamicTarget.unity +++ b/UnitySDK/Assets/ML-Agents/Examples/Crawler/Scenes/CrawlerDynamicTarget.unity @@ -1324,10 +1324,8 @@ MonoBehaviour: m_Name: m_EditorClassIdentifier: broadcastHub: - broadcastingBrains: + brainsToControl: - {fileID: 11400000, guid: 0e3b44d36c7a047c4addb92457b12be5, type: 2} - m_BrainsToControl: [] - m_MaxSteps: 0 m_TrainingConfiguration: width: 80 height: 80 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Crawler/Scenes/CrawlerStaticTarget.unity b/UnitySDK/Assets/ML-Agents/Examples/Crawler/Scenes/CrawlerStaticTarget.unity index 3fda137822..6a4450cc17 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/Crawler/Scenes/CrawlerStaticTarget.unity +++ b/UnitySDK/Assets/ML-Agents/Examples/Crawler/Scenes/CrawlerStaticTarget.unity @@ -1346,10 +1346,8 @@ MonoBehaviour: m_Name: m_EditorClassIdentifier: broadcastHub: - 
broadcastingBrains: + brainsToControl: - {fileID: 11400000, guid: 0505e961608004377974940ed17e03d5, type: 2} - m_BrainsToControl: [] - m_MaxSteps: 0 m_TrainingConfiguration: width: 1280 height: 720 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Crawler/Scripts/CrawlerAgent.cs b/UnitySDK/Assets/ML-Agents/Examples/Crawler/Scripts/CrawlerAgent.cs index e286f2ae41..2983ed9e83 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/Crawler/Scripts/CrawlerAgent.cs +++ b/UnitySDK/Assets/ML-Agents/Examples/Crawler/Scripts/CrawlerAgent.cs @@ -9,7 +9,7 @@ public class CrawlerAgent : Agent public Transform ground; public bool detectTargets; - public bool targetIsStatic = false; + public bool targetIsStatic; public bool respawnTargetWhenTouched; public float targetSpawnRadius; diff --git a/UnitySDK/Assets/ML-Agents/Examples/Crawler/TFModels/CrawlerDynamic.nn b/UnitySDK/Assets/ML-Agents/Examples/Crawler/TFModels/CrawlerDynamic.nn new file mode 100644 index 0000000000..fc8e03a99c Binary files /dev/null and b/UnitySDK/Assets/ML-Agents/Examples/Crawler/TFModels/CrawlerDynamic.nn differ diff --git a/UnitySDK/Assets/ML-Agents/Examples/Crawler/TFModels/CrawlerDynamic.nn.meta b/UnitySDK/Assets/ML-Agents/Examples/Crawler/TFModels/CrawlerDynamic.nn.meta new file mode 100644 index 0000000000..f6a6afbf9f --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Examples/Crawler/TFModels/CrawlerDynamic.nn.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: 039557e683d584183a2a82cf8b1904c0 +ScriptedImporter: + userData: + assetBundleName: + assetBundleVariant: + script: {fileID: 11500000, guid: 19ed1486aa27d4903b34839f37b8f69f, type: 3} diff --git a/UnitySDK/Assets/ML-Agents/Examples/Crawler/TFModels/CrawlerDynamicLearning.nn b/UnitySDK/Assets/ML-Agents/Examples/Crawler/TFModels/CrawlerDynamicLearning.nn deleted file mode 100644 index 4942611525..0000000000 Binary files a/UnitySDK/Assets/ML-Agents/Examples/Crawler/TFModels/CrawlerDynamicLearning.nn and /dev/null differ diff --git 
a/UnitySDK/Assets/ML-Agents/Examples/Crawler/TFModels/CrawlerDynamicLearning.nn.meta b/UnitySDK/Assets/ML-Agents/Examples/Crawler/TFModels/CrawlerDynamicLearning.nn.meta deleted file mode 100644 index ba59457d02..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Crawler/TFModels/CrawlerDynamicLearning.nn.meta +++ /dev/null @@ -1,7 +0,0 @@ -fileFormatVersion: 2 -guid: abc9c8f2180154ed7ba3f116ab0beb90 -ScriptedImporter: - userData: - assetBundleName: - assetBundleVariant: - script: {fileID: 11500000, guid: 19ed1486aa27d4903b34839f37b8f69f, type: 3} diff --git a/UnitySDK/Assets/ML-Agents/Examples/Crawler/TFModels/CrawlerStatic.nn b/UnitySDK/Assets/ML-Agents/Examples/Crawler/TFModels/CrawlerStatic.nn new file mode 100644 index 0000000000..77cf29377c Binary files /dev/null and b/UnitySDK/Assets/ML-Agents/Examples/Crawler/TFModels/CrawlerStatic.nn differ diff --git a/UnitySDK/Assets/ML-Agents/Examples/Crawler/TFModels/CrawlerStatic.nn.meta b/UnitySDK/Assets/ML-Agents/Examples/Crawler/TFModels/CrawlerStatic.nn.meta new file mode 100644 index 0000000000..f24c7e348e --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Examples/Crawler/TFModels/CrawlerStatic.nn.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: ac4a23ff4713140198629ae0844926ee +ScriptedImporter: + userData: + assetBundleName: + assetBundleVariant: + script: {fileID: 11500000, guid: 19ed1486aa27d4903b34839f37b8f69f, type: 3} diff --git a/UnitySDK/Assets/ML-Agents/Examples/Crawler/TFModels/CrawlerStaticLearning.nn b/UnitySDK/Assets/ML-Agents/Examples/Crawler/TFModels/CrawlerStaticLearning.nn deleted file mode 100644 index 6c23b931e9..0000000000 Binary files a/UnitySDK/Assets/ML-Agents/Examples/Crawler/TFModels/CrawlerStaticLearning.nn and /dev/null differ diff --git a/UnitySDK/Assets/ML-Agents/Examples/Crawler/TFModels/CrawlerStaticLearning.nn.meta b/UnitySDK/Assets/ML-Agents/Examples/Crawler/TFModels/CrawlerStaticLearning.nn.meta deleted file mode 100644 index 22b2e59ce7..0000000000 --- 
a/UnitySDK/Assets/ML-Agents/Examples/Crawler/TFModels/CrawlerStaticLearning.nn.meta +++ /dev/null @@ -1,7 +0,0 @@ -fileFormatVersion: 2 -guid: 48982d8fa360a4ed0bb265495e4f378b -ScriptedImporter: - userData: - assetBundleName: - assetBundleVariant: - script: {fileID: 11500000, guid: 19ed1486aa27d4903b34839f37b8f69f, type: 3} diff --git a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Brains.meta b/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Brains.meta deleted file mode 100644 index 1e1160a92d..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Brains.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: ce50f8782646e42d3906cc3d58e49791 -folderAsset: yes -DefaultImporter: - externalObjects: {} - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Brains/FoodCollectorLearning.asset b/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Brains/FoodCollectorLearning.asset deleted file mode 100644 index a630d944f6..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Brains/FoodCollectorLearning.asset +++ /dev/null @@ -1,26 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 8b23992c8eb17439887f5e944bf04a40, type: 3} - m_Name: FoodCollectorLearning - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 53 - numStackedVectorObservations: 1 - vectorActionSize: 03000000030000000300000002000000 - cameraResolutions: [] - vectorActionDescriptions: - - - - - - - - - vectorActionSpaceType: 0 - model: {fileID: 11400000, guid: d32fca21cf4c04536ab7f88eb9de83e0, type: 3} - inferenceDevice: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Brains/FoodCollectorLearning.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Brains/FoodCollectorLearning.asset.meta deleted file mode 100644 index 883a28dd4a..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Brains/FoodCollectorLearning.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 9e7865ec29c894c2d8c1617b0fa392f9 -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Brains/FoodCollectorPlayer.asset b/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Brains/FoodCollectorPlayer.asset deleted file mode 100644 index 3f68a3e36d..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Brains/FoodCollectorPlayer.asset +++ /dev/null @@ -1,42 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 41e9bda8f3cf1492fa74926a530f6f70, type: 3} - m_Name: FoodCollectorPlayer - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 53 - numStackedVectorObservations: 1 - vectorActionSize: 03000000030000000300000002000000 - cameraResolutions: [] - vectorActionDescriptions: - - - - - - - - - vectorActionSpaceType: 0 - keyContinuousPlayerActions: [] - axisContinuousPlayerActions: [] - discretePlayerActions: - - key: 119 - branchIndex: 0 - value: 1 - - key: 115 - branchIndex: 0 - value: 2 - - key: 97 - branchIndex: 2 - value: 1 - - key: 100 - branchIndex: 2 - value: 2 - - key: 32 - branchIndex: 3 - value: 1 diff --git a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Brains/FoodCollectorPlayer.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Brains/FoodCollectorPlayer.asset.meta deleted file mode 100644 index fe490ff03e..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Brains/FoodCollectorPlayer.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: dff7429d656234fed84c4fac2a7a683c -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Brains/VisualFoodCollectorLearning.asset b/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Brains/VisualFoodCollectorLearning.asset deleted file mode 100644 index 23b9df8c09..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Brains/VisualFoodCollectorLearning.asset +++ /dev/null @@ -1,29 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 8b23992c8eb17439887f5e944bf04a40, type: 3} - m_Name: VisualFoodCollectorLearning - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 0 - numStackedVectorObservations: 1 - vectorActionSize: 03000000030000000300000002000000 - cameraResolutions: - - width: 84 - height: 84 - blackAndWhite: 0 - vectorActionDescriptions: - - - - - - - - - vectorActionSpaceType: 0 - model: {fileID: 0} - inferenceDevice: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Brains/VisualFoodCollectorLearning.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Brains/VisualFoodCollectorLearning.asset.meta deleted file mode 100644 index 511e90d095..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Brains/VisualFoodCollectorLearning.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 24e823594179d48189b2c78003c50ce0 -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Brains/VisualFoodCollectorPlayer.asset b/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Brains/VisualFoodCollectorPlayer.asset deleted file mode 100644 index 9e05f0b246..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Brains/VisualFoodCollectorPlayer.asset +++ /dev/null @@ -1,45 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 41e9bda8f3cf1492fa74926a530f6f70, type: 3} - m_Name: VisualFoodCollectorPlayer - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 0 - numStackedVectorObservations: 1 - vectorActionSize: 03000000030000000300000002000000 - cameraResolutions: - - width: 84 - height: 84 - blackAndWhite: 0 - vectorActionDescriptions: - - - - - - - - - vectorActionSpaceType: 0 - keyContinuousPlayerActions: [] - axisContinuousPlayerActions: [] - discretePlayerActions: - - key: 119 - branchIndex: 0 - value: 1 - - key: 115 - branchIndex: 0 - value: 2 - - key: 97 - branchIndex: 2 - value: 1 - - key: 100 - branchIndex: 2 - value: 2 - - key: 32 - branchIndex: 3 - value: 1 diff --git a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Brains/VisualFoodCollectorPlayer.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Brains/VisualFoodCollectorPlayer.asset.meta deleted file mode 100644 index 59f7fc412d..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Brains/VisualFoodCollectorPlayer.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 670f27cd2712f4f7badaa1e9fc7577ce -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Prefabs/FoodCollectorArea.prefab b/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Prefabs/FoodCollectorArea.prefab index 91c7e1ed42..df1d5ff597 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Prefabs/FoodCollectorArea.prefab +++ b/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Prefabs/FoodCollectorArea.prefab @@ -494,6 +494,7 @@ GameObject: - component: {fileID: 4419274671784554} - component: {fileID: 
65550728419070768} - component: {fileID: 54936164982484646} + - component: {fileID: 114374774605792098} - component: {fileID: 114762047763154270} - component: {fileID: 114176228333253036} m_Layer: 0 @@ -547,6 +548,7 @@ GameObject: - component: {fileID: 4756368533889646} - component: {fileID: 65905012397919158} - component: {fileID: 54504078365531932} + - component: {fileID: 114522573150607728} - component: {fileID: 114416645532260476} - component: {fileID: 114711827726849508} m_Layer: 0 @@ -601,6 +603,7 @@ GameObject: - component: {fileID: 4426245476092464} - component: {fileID: 65152194455140476} - component: {fileID: 54961653455021136} + - component: {fileID: 114980787530065684} - component: {fileID: 114192565006091356} - component: {fileID: 114542632553128056} m_Layer: 0 @@ -721,6 +724,7 @@ GameObject: - component: {fileID: 4259834826122778} - component: {fileID: 65761952312736034} - component: {fileID: 54819001862035794} + - component: {fileID: 114878550018296316} - component: {fileID: 114661830999747712} - component: {fileID: 114189751434580810} m_Layer: 0 @@ -774,6 +778,7 @@ GameObject: - component: {fileID: 4137908820211030} - component: {fileID: 65367560123033576} - component: {fileID: 54895479068989492} + - component: {fileID: 114035338027591536} - component: {fileID: 114821937036444478} - component: {fileID: 114235147148547996} m_Layer: 0 @@ -3666,6 +3671,27 @@ BoxCollider: serializedVersion: 2 m_Size: {x: 1, y: 1, z: 1} m_Center: {x: 0, y: 0, z: 0} +--- !u!114 &114035338027591536 +MonoBehaviour: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1706274796045088} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} + m_Name: + m_EditorClassIdentifier: + m_BrainParameters: + vectorObservationSize: 53 + numStackedVectorObservations: 1 + vectorActionSize: 03000000030000000300000002000000 + vectorActionDescriptions: [] + 
vectorActionSpaceType: 0 + m_Model: {fileID: 11400000, guid: 36ab3e93020504f48858d0856f939685, type: 3} + m_InferenceDevice: 0 + m_UseHeuristic: 0 + m_BehaviorName: FoodCollector --- !u!114 &114176228333253036 MonoBehaviour: m_ObjectHideFlags: 1 @@ -3677,10 +3703,7 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: c66e6845309d241c78a6d77ee2567928, type: 3} m_Name: m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: 9e7865ec29c894c2d8c1617b0fa392f9, type: 2} agentParameters: - agentCameras: [] - agentRenderTextures: [] maxStep: 5000 resetOnDone: 1 onDemandDecision: 0 @@ -3723,10 +3746,7 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: c66e6845309d241c78a6d77ee2567928, type: 3} m_Name: m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: 9e7865ec29c894c2d8c1617b0fa392f9, type: 2} agentParameters: - agentCameras: [] - agentRenderTextures: [] maxStep: 5000 resetOnDone: 1 onDemandDecision: 0 @@ -3763,10 +3783,7 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: c66e6845309d241c78a6d77ee2567928, type: 3} m_Name: m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: 9e7865ec29c894c2d8c1617b0fa392f9, type: 2} agentParameters: - agentCameras: [] - agentRenderTextures: [] maxStep: 5000 resetOnDone: 1 onDemandDecision: 0 @@ -3781,6 +3798,27 @@ MonoBehaviour: myLaser: {fileID: 1045923826166930} contribute: 0 useVectorObs: 1 +--- !u!114 &114374774605792098 +MonoBehaviour: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1464820575638702} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} + m_Name: + m_EditorClassIdentifier: + m_BrainParameters: + vectorObservationSize: 53 + numStackedVectorObservations: 1 + vectorActionSize: 03000000030000000300000002000000 + vectorActionDescriptions: [] + vectorActionSpaceType: 0 + m_Model: {fileID: 11400000, guid: 36ab3e93020504f48858d0856f939685, type: 3} + m_InferenceDevice: 
0 + m_UseHeuristic: 0 + m_BehaviorName: FoodCollector --- !u!114 &114416645532260476 MonoBehaviour: m_ObjectHideFlags: 1 @@ -3792,6 +3830,27 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: bb172294dbbcc408286b156a2c4b553c, type: 3} m_Name: m_EditorClassIdentifier: +--- !u!114 &114522573150607728 +MonoBehaviour: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1495617568563208} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} + m_Name: + m_EditorClassIdentifier: + m_BrainParameters: + vectorObservationSize: 53 + numStackedVectorObservations: 1 + vectorActionSize: 03000000030000000300000002000000 + vectorActionDescriptions: [] + vectorActionSpaceType: 0 + m_Model: {fileID: 11400000, guid: 36ab3e93020504f48858d0856f939685, type: 3} + m_InferenceDevice: 0 + m_UseHeuristic: 0 + m_BehaviorName: FoodCollector --- !u!114 &114542632553128056 MonoBehaviour: m_ObjectHideFlags: 1 @@ -3803,10 +3862,7 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: c66e6845309d241c78a6d77ee2567928, type: 3} m_Name: m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: 9e7865ec29c894c2d8c1617b0fa392f9, type: 2} agentParameters: - agentCameras: [] - agentRenderTextures: [] maxStep: 5000 resetOnDone: 1 onDemandDecision: 0 @@ -3843,10 +3899,7 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: c66e6845309d241c78a6d77ee2567928, type: 3} m_Name: m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: 9e7865ec29c894c2d8c1617b0fa392f9, type: 2} agentParameters: - agentCameras: [] - agentRenderTextures: [] maxStep: 5000 resetOnDone: 1 onDemandDecision: 0 @@ -3883,3 +3936,45 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: bb172294dbbcc408286b156a2c4b553c, type: 3} m_Name: m_EditorClassIdentifier: +--- !u!114 &114878550018296316 +MonoBehaviour: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} 
+ m_GameObject: {fileID: 1672905243433088} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} + m_Name: + m_EditorClassIdentifier: + m_BrainParameters: + vectorObservationSize: 53 + numStackedVectorObservations: 1 + vectorActionSize: 03000000030000000300000002000000 + vectorActionDescriptions: [] + vectorActionSpaceType: 0 + m_Model: {fileID: 11400000, guid: 36ab3e93020504f48858d0856f939685, type: 3} + m_InferenceDevice: 0 + m_UseHeuristic: 0 + m_BehaviorName: FoodCollector +--- !u!114 &114980787530065684 +MonoBehaviour: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1601500200010266} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} + m_Name: + m_EditorClassIdentifier: + m_BrainParameters: + vectorObservationSize: 53 + numStackedVectorObservations: 1 + vectorActionSize: 03000000030000000300000002000000 + vectorActionDescriptions: [] + vectorActionSpaceType: 0 + m_Model: {fileID: 11400000, guid: 36ab3e93020504f48858d0856f939685, type: 3} + m_InferenceDevice: 0 + m_UseHeuristic: 0 + m_BehaviorName: FoodCollector diff --git a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Prefabs/FoodCollectorTeachingArea.prefab b/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Prefabs/FoodCollectorTeachingArea.prefab deleted file mode 100644 index 6f902233ae..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Prefabs/FoodCollectorTeachingArea.prefab +++ /dev/null @@ -1,3959 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!1001 &100100000 -Prefab: - m_ObjectHideFlags: 1 - serializedVersion: 2 - m_Modification: - m_TransformParent: {fileID: 0} - m_Modifications: [] - m_RemovedComponents: [] - m_ParentPrefab: {fileID: 0} - m_RootGameObject: {fileID: 1148463453037344} - m_IsPrefabParent: 1 ---- !u!1 &1019040130223056 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4759135727950766} - m_Layer: 0 - m_Name: Court - m_TagString: wall - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1032277567487600 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4463053708203254} - - component: {fileID: 20461256959685446} - m_Layer: 0 - m_Name: AgentCamera - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 0 ---- !u!1 &1035806283124670 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4089023012196426} - m_Layer: 0 - m_Name: Laser - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1036718751643974 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4440531283818028} - m_Layer: 0 - m_Name: Laser - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1060127683763250 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4401972798800426} - - 
component: {fileID: 33495394254014352} - - component: {fileID: 23701671039184230} - m_Layer: 0 - m_Name: AgentCube_Purple - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1083644460692486 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4860428730037900} - - component: {fileID: 65267459799989724} - - component: {fileID: 54616290575522960} - - component: {fileID: 114516767535109608} - - component: {fileID: 114260856195490218} - m_Layer: 0 - m_Name: StudentAgent (1) - m_TagString: agent - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1104285561328794 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4043521150123220} - - component: {fileID: 33374377061733760} - - component: {fileID: 23000313953293332} - m_Layer: 0 - m_Name: mouth - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1115585505442730 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4643487051549420} - - component: {fileID: 33461864521879796} - - component: {fileID: 23794318046219148} - m_Layer: 0 - m_Name: eye - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1118490959067888 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4855230491420082} - m_Layer: 0 - m_Name: Laser - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - 
m_IsActive: 1 ---- !u!1 &1119160234761140 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4874637605867312} - - component: {fileID: 33260164656656074} - - component: {fileID: 23189145310999932} - m_Layer: 0 - m_Name: eye - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1148463453037344 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4372429183926998} - - component: {fileID: 114007330774343768} - m_Layer: 0 - m_Name: FoodCollectorTeachingArea - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1166161536296714 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4417448006619290} - - component: {fileID: 20824293318392576} - m_Layer: 0 - m_Name: AgentCamera - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 0 ---- !u!1 &1178344588676694 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4998859833614822} - - component: {fileID: 33506182739557962} - - component: {fileID: 23977096569633444} - m_Layer: 0 - m_Name: Headband - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1207275472187634 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4608788594177934} - - component: {fileID: 33678750942580600} - - 
component: {fileID: 65457923031945612} - - component: {fileID: 23632454201028546} - m_Layer: 0 - m_Name: Cube - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1230785203868376 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4894538592068096} - - component: {fileID: 33613173212918468} - - component: {fileID: 65760791154338318} - - component: {fileID: 23421736323192752} - m_Layer: 0 - m_Name: Cube - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1277627577235966 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4248669074871358} - - component: {fileID: 33784303488748150} - - component: {fileID: 23183453019996956} - m_Layer: 0 - m_Name: AgentCube_Purple - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1283864300740804 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4937981289722198} - - component: {fileID: 33839396619905048} - - component: {fileID: 23570499375499866} - - component: {fileID: 64687717889352452} - m_Layer: 0 - m_Name: WallsOuter - m_TagString: wall - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1326063871773226 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4374782728896950} - - component: {fileID: 33159088425452110} - - component: {fileID: 23251368955219986} - m_Layer: 0 - m_Name: Headband - m_TagString: 
Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1408232831101942 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4261013212553228} - - component: {fileID: 20249574135862722} - - component: {fileID: 92966420001434432} - m_Layer: 0 - m_Name: PlayerCam - m_TagString: MainCamera - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1419399000231728 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4387319972833638} - - component: {fileID: 33598340761627654} - - component: {fileID: 65320662598926772} - - component: {fileID: 23619187350806738} - m_Layer: 0 - m_Name: Cube (7) - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1460055155855532 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4879398375124082} - - component: {fileID: 33965989747114456} - - component: {fileID: 23966511415204298} - m_Layer: 0 - m_Name: mouth - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1472886453686564 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4738281783443470} - - component: {fileID: 65432908789117412} - - component: {fileID: 54777439013240364} - - component: {fileID: 114688308485477898} - - component: {fileID: 114062657662370576} - m_Layer: 0 - m_Name: StudentAgent (3) - m_TagString: agent - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - 
m_IsActive: 1 ---- !u!1 &1480973913292458 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4146027823984860} - - component: {fileID: 33490642010653766} - - component: {fileID: 65177541691534190} - - component: {fileID: 23660591068400810} - m_Layer: 0 - m_Name: Cube (7) - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1495140326038082 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4809538760796712} - - component: {fileID: 33508394712190790} - - component: {fileID: 65860898373705094} - - component: {fileID: 23491236768280636} - m_Layer: 0 - m_Name: Cube (7) - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1499864950410692 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4023872978181988} - - component: {fileID: 33217733682310280} - - component: {fileID: 23087407082491696} - m_Layer: 0 - m_Name: eye - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1511448244239904 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4543132746757250} - - component: {fileID: 33498657528635338} - - component: {fileID: 23144213216548164} - m_Layer: 0 - m_Name: AgentCube_Blue - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1517411571266264 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - 
m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4818148766583302} - - component: {fileID: 33672694124524946} - - component: {fileID: 23392637879397284} - m_Layer: 0 - m_Name: eye - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1532540667329522 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4122198474916958} - - component: {fileID: 33203886542245800} - - component: {fileID: 23599136802246988} - m_Layer: 0 - m_Name: Headband - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1533469610404082 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4191741480206292} - - component: {fileID: 33375487932990328} - - component: {fileID: 23473223860544830} - m_Layer: 0 - m_Name: Headband - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1538412416681886 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4489175365708016} - - component: {fileID: 33520066304594230} - - component: {fileID: 65674116623153200} - - component: {fileID: 23083013243699668} - m_Layer: 0 - m_Name: Cube (7) - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1550625483787518 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4420472081310982} - - component: {fileID: 33471048937125694} - - component: 
{fileID: 23211890474760446} - m_Layer: 0 - m_Name: mouth - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1614221853968290 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4078368073314408} - - component: {fileID: 33084017896785664} - - component: {fileID: 23892380659979872} - m_Layer: 0 - m_Name: eye - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1622977628854186 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4013191745572276} - - component: {fileID: 33122382056330570} - - component: {fileID: 23877466885887120} - m_Layer: 0 - m_Name: eye - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1623737045195386 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4612669422693706} - - component: {fileID: 33574550919569692} - - component: {fileID: 23844901860044028} - - component: {fileID: 65137122363770072} - m_Layer: 0 - m_Name: Floor - m_TagString: ground - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1640548838061932 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4618952072988912} - - component: {fileID: 33707046786369034} - - component: {fileID: 23185188104928530} - m_Layer: 0 - m_Name: eye - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1655399711430926 
-GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4915577572558104} - - component: {fileID: 33263065535596452} - - component: {fileID: 23341744008000800} - m_Layer: 0 - m_Name: mouth - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1679799345091900 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4366725126600838} - - component: {fileID: 20886270057421904} - m_Layer: 0 - m_Name: AgentCamera - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 0 ---- !u!1 &1693583332082218 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4412211215558322} - - component: {fileID: 20698829392063646} - m_Layer: 0 - m_Name: AgentCamera - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 0 ---- !u!1 &1707840480972208 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4839102916901406} - - component: {fileID: 33593948490422464} - - component: {fileID: 65563168914267318} - - component: {fileID: 23492882127774122} - m_Layer: 0 - m_Name: Cube - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1717920238940982 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4721323319590288} - m_Layer: 0 - m_Name: Laser - m_TagString: Untagged - m_Icon: {fileID: 
0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1730447000192316 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4547659310577522} - - component: {fileID: 33630981864968192} - - component: {fileID: 23106919239663994} - m_Layer: 0 - m_Name: AgentCube_Purple - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1739239930433140 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4158445468848778} - - component: {fileID: 33813723186024048} - - component: {fileID: 23310558825950146} - m_Layer: 0 - m_Name: Headband - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1780298668326468 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4850826115460204} - - component: {fileID: 33603579224617926} - - component: {fileID: 65455258309594506} - - component: {fileID: 23165959560319070} - m_Layer: 0 - m_Name: Cube - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1817700244901568 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4973205504095692} - - component: {fileID: 65355355437149540} - - component: {fileID: 54416501493853776} - - component: {fileID: 114569279624194478} - - component: {fileID: 114615827905664130} - m_Layer: 0 - m_Name: TeacherAgent - m_TagString: agent - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 
&1851446196430958 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4038901482202374} - - component: {fileID: 33521097570988880} - - component: {fileID: 23592439088075056} - m_Layer: 0 - m_Name: eye - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1855266688095076 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4888429316354804} - - component: {fileID: 20969894216019868} - m_Layer: 0 - m_Name: AgentCamera - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 0 ---- !u!1 &1874858069831842 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4163625821714438} - - component: {fileID: 33777269475212208} - - component: {fileID: 23236608996151106} - m_Layer: 0 - m_Name: eye - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1899934101036282 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4125965426428202} - - component: {fileID: 33508935184907494} - - component: {fileID: 65959685830105506} - - component: {fileID: 23233818304104802} - m_Layer: 0 - m_Name: Cube (7) - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1908911473514362 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4820170975554988} - m_Layer: 0 - 
m_Name: Laser - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1909382423022668 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4163414707488626} - - component: {fileID: 65819040859489448} - - component: {fileID: 54786813764074612} - - component: {fileID: 114139780013492282} - - component: {fileID: 114082336001058488} - m_Layer: 0 - m_Name: StudentAgent - m_TagString: agent - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1913123685927870 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4975429012998718} - - component: {fileID: 33973803526351230} - - component: {fileID: 23398093469604598} - m_Layer: 0 - m_Name: mouth - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1969596566606862 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4090185576666620} - - component: {fileID: 65712219651821442} - - component: {fileID: 54145483456843648} - - component: {fileID: 114090124869684900} - - component: {fileID: 114249416559197368} - m_Layer: 0 - m_Name: StudentAgent (2) - m_TagString: agent - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1979837169441746 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4295545484486642} - - component: {fileID: 33381665793525412} - - component: {fileID: 23641427415559202} - m_Layer: 0 - m_Name: eye - m_TagString: Untagged - 
m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1993066047395702 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4316615647955974} - - component: {fileID: 33391339757244194} - - component: {fileID: 65508887627785202} - - component: {fileID: 23292970588553896} - m_Layer: 0 - m_Name: Cube - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1999690940405766 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4938936867781656} - - component: {fileID: 33022767511227186} - - component: {fileID: 23357581325744500} - m_Layer: 0 - m_Name: AgentCube_Purple - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &4013191745572276 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1622977628854186} - m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} - m_LocalPosition: {x: 0.29999995, y: 0.07399994, z: 0.50040054} - m_LocalScale: {x: 0.29457998, y: 0.29457998, z: 0.29457998} - m_Children: [] - m_Father: {fileID: 4248669074871358} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4023872978181988 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1499864950410692} - m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} - m_LocalPosition: {x: -0.29999995, y: 0.07399994, z: 0.50040054} - m_LocalScale: {x: 0.29457998, y: 0.29457998, z: 0.29457998} - m_Children: [] - m_Father: {fileID: 4938936867781656} - m_RootOrder: 2 - m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4038901482202374 
-Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1851446196430958} - m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} - m_LocalPosition: {x: -0.29999995, y: 0.07399994, z: 0.50040054} - m_LocalScale: {x: 0.29457998, y: 0.29457998, z: 0.29457998} - m_Children: [] - m_Father: {fileID: 4248669074871358} - m_RootOrder: 2 - m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4043521150123220 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1104285561328794} - m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} - m_LocalPosition: {x: 0, y: -0.18299997, z: 0.50040054} - m_LocalScale: {x: 0.27602, y: 0.042489994, z: 0.13891} - m_Children: [] - m_Father: {fileID: 4547659310577522} - m_RootOrder: 3 - m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4078368073314408 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1614221853968290} - m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} - m_LocalPosition: {x: 0.29999995, y: 0.07399994, z: 0.50040054} - m_LocalScale: {x: 0.29457998, y: 0.29457998, z: 0.29457998} - m_Children: [] - m_Father: {fileID: 4938936867781656} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4089023012196426 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1035806283124670} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 4850826115460204} - - {fileID: 4809538760796712} - m_Father: {fileID: 4860428730037900} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4090185576666620 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} 
- m_GameObject: {fileID: 1969596566606862} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0.84, y: 1.5, z: -9.559753} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 4547659310577522} - - {fileID: 4440531283818028} - m_Father: {fileID: 4372429183926998} - m_RootOrder: 4 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4122198474916958 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1532540667329522} - m_LocalRotation: {x: -0, y: -0, z: -0.036135223, w: 0.999347} - m_LocalPosition: {x: 0, y: 0.341, z: 0} - m_LocalScale: {x: 1.0441425, y: 0.19278127, z: 1.0441422} - m_Children: [] - m_Father: {fileID: 4401972798800426} - m_RootOrder: 4 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: -4.142} ---- !u!4 &4125965426428202 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1899934101036282} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: -0.060000002, z: 10} - m_LocalScale: {x: 0.77, y: 0.26, z: 20} - m_Children: [] - m_Father: {fileID: 4440531283818028} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4146027823984860 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1480973913292458} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: -0.060000002, z: 10} - m_LocalScale: {x: 0.77, y: 0.26, z: 20} - m_Children: [] - m_Father: {fileID: 4855230491420082} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4158445468848778 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1739239930433140} - m_LocalRotation: {x: -0, y: -0, z: -0.036135223, w: 0.999347} - m_LocalPosition: {x: 0, y: 0.341, z: 0} - 
m_LocalScale: {x: 1.0441425, y: 0.19278127, z: 1.0441422} - m_Children: [] - m_Father: {fileID: 4248669074871358} - m_RootOrder: 4 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: -4.142} ---- !u!4 &4163414707488626 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1909382423022668} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0.84, y: 1.5, z: -9.559753} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 4401972798800426} - - {fileID: 4721323319590288} - m_Father: {fileID: 4372429183926998} - m_RootOrder: 2 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4163625821714438 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1874858069831842} - m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} - m_LocalPosition: {x: 0.29999995, y: 0.07399994, z: 0.50040054} - m_LocalScale: {x: 0.29457998, y: 0.29457998, z: 0.29457998} - m_Children: [] - m_Father: {fileID: 4547659310577522} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4191741480206292 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1533469610404082} - m_LocalRotation: {x: -0, y: -0, z: 0.016506119, w: 0.9998638} - m_LocalPosition: {x: 0, y: 0.341, z: 0} - m_LocalScale: {x: 1.0441425, y: 0.19278127, z: 1.0441422} - m_Children: [] - m_Father: {fileID: 4543132746757250} - m_RootOrder: 4 - m_LocalEulerAnglesHint: {x: 0, y: -179.99998, z: 1.8920001} ---- !u!4 &4248669074871358 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1277627577235966} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 4366725126600838} - - {fileID: 
4013191745572276} - - {fileID: 4038901482202374} - - {fileID: 4915577572558104} - - {fileID: 4158445468848778} - m_Father: {fileID: 4860428730037900} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4261013212553228 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1408232831101942} - m_LocalRotation: {x: 0.08715578, y: -0, z: -0, w: 0.9961947} - m_LocalPosition: {x: 0, y: 2, z: -4} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 4973205504095692} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 10, y: 0, z: 0} ---- !u!4 &4295545484486642 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1979837169441746} - m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} - m_LocalPosition: {x: 0.29999995, y: 0.07399994, z: 0.50040054} - m_LocalScale: {x: 0.29457998, y: 0.29457998, z: 0.29457998} - m_Children: [] - m_Father: {fileID: 4543132746757250} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4316615647955974 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1993066047395702} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: -0.060000002, z: 10} - m_LocalScale: {x: 0.35, y: 0.13, z: 20} - m_Children: [] - m_Father: {fileID: 4820170975554988} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4366725126600838 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1679799345091900} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0.15} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 4248669074871358} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 
&4372429183926998 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1148463453037344} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 4759135727950766} - - {fileID: 4973205504095692} - - {fileID: 4163414707488626} - - {fileID: 4860428730037900} - - {fileID: 4090185576666620} - - {fileID: 4738281783443470} - m_Father: {fileID: 0} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4374782728896950 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1326063871773226} - m_LocalRotation: {x: -0, y: -0, z: -0.036135223, w: 0.999347} - m_LocalPosition: {x: 0, y: 0.341, z: 0} - m_LocalScale: {x: 1.0441425, y: 0.19278127, z: 1.0441422} - m_Children: [] - m_Father: {fileID: 4547659310577522} - m_RootOrder: 4 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: -4.142} ---- !u!4 &4387319972833638 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1419399000231728} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: -0.060000002, z: 10} - m_LocalScale: {x: 0.77, y: 0.26, z: 20} - m_Children: [] - m_Father: {fileID: 4721323319590288} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4401972798800426 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1060127683763250} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 4417448006619290} - - {fileID: 4618952072988912} - - {fileID: 4874637605867312} - - {fileID: 4975429012998718} - - {fileID: 4122198474916958} - m_Father: {fileID: 4163414707488626} - 
m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4412211215558322 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1693583332082218} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0.15} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 4543132746757250} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4417448006619290 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1166161536296714} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0.15} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 4401972798800426} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4420472081310982 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1550625483787518} - m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} - m_LocalPosition: {x: 0, y: -0.18299997, z: 0.50040054} - m_LocalScale: {x: 0.27602, y: 0.042489994, z: 0.13891} - m_Children: [] - m_Father: {fileID: 4543132746757250} - m_RootOrder: 3 - m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4440531283818028 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1036718751643974} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 4608788594177934} - - {fileID: 4125965426428202} - m_Father: {fileID: 4090185576666620} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4463053708203254 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: 
{fileID: 1032277567487600} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0.15} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 4547659310577522} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4489175365708016 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1538412416681886} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: -0.060000002, z: 10} - m_LocalScale: {x: 0.77, y: 0.26, z: 20} - m_Children: [] - m_Father: {fileID: 4820170975554988} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4543132746757250 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1511448244239904} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 4412211215558322} - - {fileID: 4295545484486642} - - {fileID: 4818148766583302} - - {fileID: 4420472081310982} - - {fileID: 4191741480206292} - m_Father: {fileID: 4973205504095692} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4547659310577522 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1730447000192316} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 4463053708203254} - - {fileID: 4163625821714438} - - {fileID: 4643487051549420} - - {fileID: 4043521150123220} - - {fileID: 4374782728896950} - m_Father: {fileID: 4090185576666620} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4608788594177934 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: 
{fileID: 1207275472187634} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: -0.060000002, z: 10} - m_LocalScale: {x: 0.35, y: 0.13, z: 20} - m_Children: [] - m_Father: {fileID: 4440531283818028} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4612669422693706 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1623737045195386} - m_LocalRotation: {x: 0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: -0, y: 0, z: 0} - m_LocalScale: {x: 0.01, y: 0.01, z: 0.01} - m_Children: [] - m_Father: {fileID: 4759135727950766} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4618952072988912 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1640548838061932} - m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} - m_LocalPosition: {x: 0.29999995, y: 0.07399994, z: 0.50040054} - m_LocalScale: {x: 0.29457998, y: 0.29457998, z: 0.29457998} - m_Children: [] - m_Father: {fileID: 4401972798800426} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4643487051549420 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1115585505442730} - m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} - m_LocalPosition: {x: -0.29999995, y: 0.07399994, z: 0.50040054} - m_LocalScale: {x: 0.29457998, y: 0.29457998, z: 0.29457998} - m_Children: [] - m_Father: {fileID: 4547659310577522} - m_RootOrder: 2 - m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4721323319590288 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1717920238940982} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 
4894538592068096} - - {fileID: 4387319972833638} - m_Father: {fileID: 4163414707488626} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4738281783443470 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1472886453686564} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0.84, y: 1.5, z: -9.559753} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 4938936867781656} - - {fileID: 4855230491420082} - m_Father: {fileID: 4372429183926998} - m_RootOrder: 5 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4759135727950766 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1019040130223056} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: -0.0078571, y: 0.36904, z: -0.0078571} - m_LocalScale: {x: 0.5, y: 0.5, z: 0.5} - m_Children: - - {fileID: 4612669422693706} - - {fileID: 4937981289722198} - m_Father: {fileID: 4372429183926998} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4809538760796712 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1495140326038082} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: -0.060000002, z: 10} - m_LocalScale: {x: 0.77, y: 0.26, z: 20} - m_Children: [] - m_Father: {fileID: 4089023012196426} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4818148766583302 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1517411571266264} - m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} - m_LocalPosition: {x: -0.29999995, y: 0.07399994, z: 0.50040054} - m_LocalScale: {x: 0.29457998, y: 0.29457998, z: 0.29457998} - m_Children: [] - m_Father: {fileID: 
4543132746757250} - m_RootOrder: 2 - m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4820170975554988 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1908911473514362} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 4316615647955974} - - {fileID: 4489175365708016} - m_Father: {fileID: 4973205504095692} - m_RootOrder: 2 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4839102916901406 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1707840480972208} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: -0.060000002, z: 10} - m_LocalScale: {x: 0.35, y: 0.13, z: 20} - m_Children: [] - m_Father: {fileID: 4855230491420082} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4850826115460204 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1780298668326468} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: -0.060000002, z: 10} - m_LocalScale: {x: 0.35, y: 0.13, z: 20} - m_Children: [] - m_Father: {fileID: 4089023012196426} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4855230491420082 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1118490959067888} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 4839102916901406} - - {fileID: 4146027823984860} - m_Father: {fileID: 4738281783443470} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4860428730037900 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: 
{fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1083644460692486} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0.84, y: 1.5, z: -9.559753} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 4248669074871358} - - {fileID: 4089023012196426} - m_Father: {fileID: 4372429183926998} - m_RootOrder: 3 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4874637605867312 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1119160234761140} - m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} - m_LocalPosition: {x: -0.29999995, y: 0.07399994, z: 0.50040054} - m_LocalScale: {x: 0.29457998, y: 0.29457998, z: 0.29457998} - m_Children: [] - m_Father: {fileID: 4401972798800426} - m_RootOrder: 2 - m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4879398375124082 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1460055155855532} - m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} - m_LocalPosition: {x: 0, y: -0.18299997, z: 0.50040054} - m_LocalScale: {x: 0.27602, y: 0.042489994, z: 0.13891} - m_Children: [] - m_Father: {fileID: 4938936867781656} - m_RootOrder: 3 - m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4888429316354804 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1855266688095076} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0.15} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 4938936867781656} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4894538592068096 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1230785203868376} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - 
m_LocalPosition: {x: 0, y: -0.060000002, z: 10} - m_LocalScale: {x: 0.35, y: 0.13, z: 20} - m_Children: [] - m_Father: {fileID: 4721323319590288} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4915577572558104 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1655399711430926} - m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} - m_LocalPosition: {x: 0, y: -0.18299997, z: 0.50040054} - m_LocalScale: {x: 0.27602, y: 0.042489994, z: 0.13891} - m_Children: [] - m_Father: {fileID: 4248669074871358} - m_RootOrder: 3 - m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4937981289722198 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1283864300740804} - m_LocalRotation: {x: 0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: -0, y: 0, z: 0} - m_LocalScale: {x: 0.01, y: 0.025, z: 0.01} - m_Children: [] - m_Father: {fileID: 4759135727950766} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4938936867781656 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1999690940405766} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 4888429316354804} - - {fileID: 4078368073314408} - - {fileID: 4023872978181988} - - {fileID: 4879398375124082} - - {fileID: 4998859833614822} - m_Father: {fileID: 4738281783443470} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4973205504095692 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1817700244901568} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: -18.1, y: 1.5, z: -9.559753} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: 
- - {fileID: 4261013212553228} - - {fileID: 4543132746757250} - - {fileID: 4820170975554988} - m_Father: {fileID: 4372429183926998} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4975429012998718 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1913123685927870} - m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} - m_LocalPosition: {x: 0, y: -0.18299997, z: 0.50040054} - m_LocalScale: {x: 0.27602, y: 0.042489994, z: 0.13891} - m_Children: [] - m_Father: {fileID: 4401972798800426} - m_RootOrder: 3 - m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4998859833614822 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1178344588676694} - m_LocalRotation: {x: -0, y: -0, z: -0.036135223, w: 0.999347} - m_LocalPosition: {x: 0, y: 0.341, z: 0} - m_LocalScale: {x: 1.0441425, y: 0.19278127, z: 1.0441422} - m_Children: [] - m_Father: {fileID: 4938936867781656} - m_RootOrder: 4 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: -4.142} ---- !u!20 &20249574135862722 -Camera: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1408232831101942} - m_Enabled: 1 - serializedVersion: 2 - m_ClearFlags: 2 - m_BackGroundColor: {r: 0.46666667, g: 0.5647059, b: 0.60784316, a: 1} - m_NormalizedViewPortRect: - serializedVersion: 2 - x: 0 - y: 0 - width: 0.6 - height: 1 - near clip plane: 0.3 - far clip plane: 1000 - field of view: 60 - orthographic: 0 - orthographic size: 4 - m_Depth: 2 - m_CullingMask: - serializedVersion: 2 - m_Bits: 4294967295 - m_RenderingPath: -1 - m_TargetTexture: {fileID: 0} - m_TargetDisplay: 0 - m_TargetEye: 3 - m_HDR: 1 - m_AllowMSAA: 1 - m_AllowDynamicResolution: 0 - m_ForceIntoRT: 1 - m_OcclusionCulling: 1 - m_StereoConvergence: 10 - m_StereoSeparation: 0.022 ---- !u!20 &20461256959685446 -Camera: - 
m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1032277567487600} - m_Enabled: 1 - serializedVersion: 2 - m_ClearFlags: 2 - m_BackGroundColor: {r: 0.46666667, g: 0.5647059, b: 0.60784316, a: 1} - m_NormalizedViewPortRect: - serializedVersion: 2 - x: 0 - y: 0 - width: 1 - height: 1 - near clip plane: 0.3 - far clip plane: 1000 - field of view: 60 - orthographic: 0 - orthographic size: 5 - m_Depth: 0 - m_CullingMask: - serializedVersion: 2 - m_Bits: 4294950911 - m_RenderingPath: -1 - m_TargetTexture: {fileID: 0} - m_TargetDisplay: 0 - m_TargetEye: 3 - m_HDR: 1 - m_AllowMSAA: 1 - m_AllowDynamicResolution: 0 - m_ForceIntoRT: 0 - m_OcclusionCulling: 1 - m_StereoConvergence: 10 - m_StereoSeparation: 0.022 ---- !u!20 &20698829392063646 -Camera: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1693583332082218} - m_Enabled: 1 - serializedVersion: 2 - m_ClearFlags: 2 - m_BackGroundColor: {r: 0.46666667, g: 0.5647059, b: 0.60784316, a: 1} - m_NormalizedViewPortRect: - serializedVersion: 2 - x: 0 - y: 0 - width: 1 - height: 1 - near clip plane: 0.3 - far clip plane: 1000 - field of view: 60 - orthographic: 0 - orthographic size: 5 - m_Depth: 0 - m_CullingMask: - serializedVersion: 2 - m_Bits: 4294950911 - m_RenderingPath: -1 - m_TargetTexture: {fileID: 0} - m_TargetDisplay: 0 - m_TargetEye: 3 - m_HDR: 1 - m_AllowMSAA: 1 - m_AllowDynamicResolution: 0 - m_ForceIntoRT: 0 - m_OcclusionCulling: 1 - m_StereoConvergence: 10 - m_StereoSeparation: 0.022 ---- !u!20 &20824293318392576 -Camera: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1166161536296714} - m_Enabled: 1 - serializedVersion: 2 - m_ClearFlags: 2 - m_BackGroundColor: {r: 0.46666667, g: 0.5647059, b: 0.60784316, a: 1} - m_NormalizedViewPortRect: - serializedVersion: 2 - x: 0 - y: 0 - width: 1 - 
height: 1 - near clip plane: 0.3 - far clip plane: 1000 - field of view: 60 - orthographic: 0 - orthographic size: 5 - m_Depth: 0 - m_CullingMask: - serializedVersion: 2 - m_Bits: 4294950911 - m_RenderingPath: -1 - m_TargetTexture: {fileID: 0} - m_TargetDisplay: 0 - m_TargetEye: 3 - m_HDR: 1 - m_AllowMSAA: 1 - m_AllowDynamicResolution: 0 - m_ForceIntoRT: 0 - m_OcclusionCulling: 1 - m_StereoConvergence: 10 - m_StereoSeparation: 0.022 ---- !u!20 &20886270057421904 -Camera: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1679799345091900} - m_Enabled: 1 - serializedVersion: 2 - m_ClearFlags: 2 - m_BackGroundColor: {r: 0.46666667, g: 0.5647059, b: 0.60784316, a: 1} - m_NormalizedViewPortRect: - serializedVersion: 2 - x: 0 - y: 0 - width: 1 - height: 1 - near clip plane: 0.3 - far clip plane: 1000 - field of view: 60 - orthographic: 0 - orthographic size: 5 - m_Depth: 0 - m_CullingMask: - serializedVersion: 2 - m_Bits: 4294950911 - m_RenderingPath: -1 - m_TargetTexture: {fileID: 0} - m_TargetDisplay: 0 - m_TargetEye: 3 - m_HDR: 1 - m_AllowMSAA: 1 - m_AllowDynamicResolution: 0 - m_ForceIntoRT: 0 - m_OcclusionCulling: 1 - m_StereoConvergence: 10 - m_StereoSeparation: 0.022 ---- !u!20 &20969894216019868 -Camera: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1855266688095076} - m_Enabled: 1 - serializedVersion: 2 - m_ClearFlags: 2 - m_BackGroundColor: {r: 0.46666667, g: 0.5647059, b: 0.60784316, a: 1} - m_NormalizedViewPortRect: - serializedVersion: 2 - x: 0 - y: 0 - width: 1 - height: 1 - near clip plane: 0.3 - far clip plane: 1000 - field of view: 60 - orthographic: 0 - orthographic size: 5 - m_Depth: 0 - m_CullingMask: - serializedVersion: 2 - m_Bits: 4294950911 - m_RenderingPath: -1 - m_TargetTexture: {fileID: 0} - m_TargetDisplay: 0 - m_TargetEye: 3 - m_HDR: 1 - m_AllowMSAA: 1 - m_AllowDynamicResolution: 0 - 
m_ForceIntoRT: 0 - m_OcclusionCulling: 1 - m_StereoConvergence: 10 - m_StereoSeparation: 0.022 ---- !u!23 &23000313953293332 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1104285561328794} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: f731be6866ce749fd8349e67ae81f76a, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23083013243699668 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1538412416681886} - m_Enabled: 1 - m_CastShadows: 0 - m_ReceiveShadows: 0 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 262d8cbc02b104990841408098431457, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23087407082491696 -MeshRenderer: - m_ObjectHideFlags: 1 - 
m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1499864950410692} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: f731be6866ce749fd8349e67ae81f76a, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23106919239663994 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1730447000192316} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: b0da1813c36914e678ba57f2790424e1, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23144213216548164 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1511448244239904} - m_Enabled: 1 - m_CastShadows: 1 - 
m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: c9fa44c2c3f8ce74ca39a3355ea42631, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23165959560319070 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1780298668326468} - m_Enabled: 1 - m_CastShadows: 0 - m_ReceiveShadows: 0 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 262d8cbc02b104990841408098431457, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23183453019996956 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1277627577235966} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 
b0da1813c36914e678ba57f2790424e1, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23185188104928530 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1640548838061932} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: f731be6866ce749fd8349e67ae81f76a, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23189145310999932 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1119160234761140} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: f731be6866ce749fd8349e67ae81f76a, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - 
m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23211890474760446 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1550625483787518} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: f731be6866ce749fd8349e67ae81f76a, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23233818304104802 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1899934101036282} - m_Enabled: 1 - m_CastShadows: 0 - m_ReceiveShadows: 0 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 262d8cbc02b104990841408098431457, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - 
m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23236608996151106 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1874858069831842} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: f731be6866ce749fd8349e67ae81f76a, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23251368955219986 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1326063871773226} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 04be259c590de46f69db4cbd1da877d5, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - 
m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23292970588553896 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1993066047395702} - m_Enabled: 1 - m_CastShadows: 0 - m_ReceiveShadows: 0 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 262d8cbc02b104990841408098431457, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23310558825950146 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1739239930433140} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 04be259c590de46f69db4cbd1da877d5, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23341744008000800 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: 
{fileID: 100100000} - m_GameObject: {fileID: 1655399711430926} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: f731be6866ce749fd8349e67ae81f76a, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23357581325744500 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1999690940405766} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: b0da1813c36914e678ba57f2790424e1, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23392637879397284 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1517411571266264} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - 
m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: f731be6866ce749fd8349e67ae81f76a, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23398093469604598 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1913123685927870} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: f731be6866ce749fd8349e67ae81f76a, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23421736323192752 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1230785203868376} - m_Enabled: 1 - m_CastShadows: 0 - m_ReceiveShadows: 0 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 262d8cbc02b104990841408098431457, type: 2} - m_StaticBatchInfo: - 
firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23473223860544830 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1533469610404082} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 04be259c590de46f69db4cbd1da877d5, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23491236768280636 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1495140326038082} - m_Enabled: 1 - m_CastShadows: 0 - m_ReceiveShadows: 0 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 262d8cbc02b104990841408098431457, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - 
m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23492882127774122 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1707840480972208} - m_Enabled: 1 - m_CastShadows: 0 - m_ReceiveShadows: 0 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 262d8cbc02b104990841408098431457, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23570499375499866 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1283864300740804} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 66163cf35956a4be08e801b750c26f33, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - 
m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23592439088075056 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1851446196430958} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: f731be6866ce749fd8349e67ae81f76a, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23599136802246988 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1532540667329522} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 04be259c590de46f69db4cbd1da877d5, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 
&23619187350806738 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1419399000231728} - m_Enabled: 1 - m_CastShadows: 0 - m_ReceiveShadows: 0 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 262d8cbc02b104990841408098431457, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23632454201028546 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1207275472187634} - m_Enabled: 1 - m_CastShadows: 0 - m_ReceiveShadows: 0 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 262d8cbc02b104990841408098431457, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23641427415559202 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 
1979837169441746} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: f731be6866ce749fd8349e67ae81f76a, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23660591068400810 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1480973913292458} - m_Enabled: 1 - m_CastShadows: 0 - m_ReceiveShadows: 0 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 262d8cbc02b104990841408098431457, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23701671039184230 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1060127683763250} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 
1 - m_Materials: - - {fileID: 2100000, guid: b0da1813c36914e678ba57f2790424e1, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23794318046219148 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1115585505442730} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: f731be6866ce749fd8349e67ae81f76a, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23844901860044028 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1623737045195386} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: acba6bf2a290a496bb8989b42bf8698d, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: 
{fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23877466885887120 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1622977628854186} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: f731be6866ce749fd8349e67ae81f76a, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23892380659979872 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1614221853968290} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: f731be6866ce749fd8349e67ae81f76a, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - 
m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23966511415204298 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1460055155855532} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: f731be6866ce749fd8349e67ae81f76a, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23977096569633444 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1178344588676694} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 04be259c590de46f69db4cbd1da877d5, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - 
m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!33 &33022767511227186 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1999690940405766} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33084017896785664 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1614221853968290} - m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33122382056330570 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1622977628854186} - m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33159088425452110 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1326063871773226} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33203886542245800 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1532540667329522} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33217733682310280 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1499864950410692} - m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33260164656656074 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1119160234761140} - m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33263065535596452 -MeshFilter: - m_ObjectHideFlags: 1 - 
m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1655399711430926} - m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33374377061733760 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1104285561328794} - m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33375487932990328 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1533469610404082} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33381665793525412 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1979837169441746} - m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33391339757244194 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1993066047395702} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33461864521879796 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1115585505442730} - m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33471048937125694 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1550625483787518} - m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33490642010653766 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1480973913292458} - m_Mesh: {fileID: 10202, guid: 
0000000000000000e000000000000000, type: 0} ---- !u!33 &33495394254014352 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1060127683763250} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33498657528635338 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1511448244239904} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33506182739557962 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1178344588676694} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33508394712190790 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1495140326038082} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33508935184907494 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1899934101036282} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33520066304594230 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1538412416681886} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33521097570988880 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1851446196430958} - m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33574550919569692 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 
100100000} - m_GameObject: {fileID: 1623737045195386} - m_Mesh: {fileID: 4300002, guid: a9d8f499f5b9848438d280dc28b3b52e, type: 3} ---- !u!33 &33593948490422464 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1707840480972208} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33598340761627654 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1419399000231728} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33603579224617926 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1780298668326468} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33613173212918468 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1230785203868376} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33630981864968192 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1730447000192316} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33672694124524946 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1517411571266264} - m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33678750942580600 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1207275472187634} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33707046786369034 -MeshFilter: - 
m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1640548838061932} - m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33777269475212208 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1874858069831842} - m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33784303488748150 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1277627577235966} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33813723186024048 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1739239930433140} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33839396619905048 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1283864300740804} - m_Mesh: {fileID: 4300000, guid: a9d8f499f5b9848438d280dc28b3b52e, type: 3} ---- !u!33 &33965989747114456 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1460055155855532} - m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33973803526351230 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1913123685927870} - m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} ---- !u!54 &54145483456843648 -Rigidbody: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1969596566606862} - serializedVersion: 2 - m_Mass: 10 - 
m_Drag: 0.5 - m_AngularDrag: 0.05 - m_UseGravity: 1 - m_IsKinematic: 0 - m_Interpolate: 0 - m_Constraints: 112 - m_CollisionDetection: 0 ---- !u!54 &54416501493853776 -Rigidbody: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1817700244901568} - serializedVersion: 2 - m_Mass: 10 - m_Drag: 0.5 - m_AngularDrag: 0.05 - m_UseGravity: 1 - m_IsKinematic: 0 - m_Interpolate: 0 - m_Constraints: 112 - m_CollisionDetection: 0 ---- !u!54 &54616290575522960 -Rigidbody: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1083644460692486} - serializedVersion: 2 - m_Mass: 10 - m_Drag: 0.5 - m_AngularDrag: 0.05 - m_UseGravity: 1 - m_IsKinematic: 0 - m_Interpolate: 0 - m_Constraints: 112 - m_CollisionDetection: 0 ---- !u!54 &54777439013240364 -Rigidbody: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1472886453686564} - serializedVersion: 2 - m_Mass: 10 - m_Drag: 0.5 - m_AngularDrag: 0.05 - m_UseGravity: 1 - m_IsKinematic: 0 - m_Interpolate: 0 - m_Constraints: 112 - m_CollisionDetection: 0 ---- !u!54 &54786813764074612 -Rigidbody: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1909382423022668} - serializedVersion: 2 - m_Mass: 10 - m_Drag: 0.5 - m_AngularDrag: 0.05 - m_UseGravity: 1 - m_IsKinematic: 0 - m_Interpolate: 0 - m_Constraints: 112 - m_CollisionDetection: 0 ---- !u!64 &64687717889352452 -MeshCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1283864300740804} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 3 - m_Convex: 0 - m_CookingOptions: 14 - m_SkinWidth: 0.01 - m_Mesh: {fileID: 4300000, guid: a9d8f499f5b9848438d280dc28b3b52e, type: 3} ---- !u!65 
&65137122363770072 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1623737045195386} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 10000, y: 2.220446e-12, z: 10000} - m_Center: {x: 0, y: -3.111633e-13, z: 0} ---- !u!65 &65177541691534190 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1480973913292458} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 0 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65267459799989724 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1083644460692486} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65320662598926772 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1419399000231728} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 0 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65355355437149540 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1817700244901568} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65432908789117412 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1472886453686564} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!65 
&65455258309594506 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1780298668326468} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 0 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65457923031945612 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1207275472187634} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 0 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65508887627785202 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1993066047395702} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 0 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65563168914267318 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1707840480972208} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 0 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65674116623153200 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1538412416681886} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 0 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65712219651821442 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1969596566606862} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65760791154338318 -BoxCollider: - 
m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1230785203868376} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 0 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65819040859489448 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1909382423022668} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65860898373705094 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1495140326038082} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 0 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65959685830105506 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1899934101036282} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 0 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!92 &92966420001434432 -Behaviour: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1408232831101942} - m_Enabled: 1 ---- !u!114 &114007330774343768 -MonoBehaviour: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1148463453037344} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: e14a1d4accf634e3fb9b5bc94d023393, type: 3} - m_Name: - m_EditorClassIdentifier: - food: {fileID: 1699568242032636, guid: b59e4a7fd76af471cadd16e90133a084, type: 2} - badFood: {fileID: 1853821981215314, guid: 350871b865967466daa1f596193cc22e, type: 2} - numFood: 15 - 
numBadFood: 15 - respawnFood: 1 - range: 20 ---- !u!114 &114062657662370576 -MonoBehaviour: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1472886453686564} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: c66e6845309d241c78a6d77ee2567928, type: 3} - m_Name: - m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: 9e7865ec29c894c2d8c1617b0fa392f9, type: 2} - agentParameters: - agentCameras: [] - agentRenderTextures: [] - maxStep: 0 - resetOnDone: 1 - onDemandDecision: 0 - numberOfActionsBetweenDecisions: 1 - area: {fileID: 1148463453037344} - turnSpeed: 300 - moveSpeed: 2 - normalMaterial: {fileID: 2100000, guid: b0da1813c36914e678ba57f2790424e1, type: 2} - badMaterial: {fileID: 2100000, guid: 88b9ae7af2c1748a0a1f63407587a601, type: 2} - goodMaterial: {fileID: 2100000, guid: c67450f290f3e4897bc40276a619e78d, type: 2} - frozenMaterial: {fileID: 2100000, guid: 66163cf35956a4be08e801b750c26f33, type: 2} - myLaser: {fileID: 1118490959067888} - contribute: 1 - useVectorObs: 1 ---- !u!114 &114082336001058488 -MonoBehaviour: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1909382423022668} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: c66e6845309d241c78a6d77ee2567928, type: 3} - m_Name: - m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: 9e7865ec29c894c2d8c1617b0fa392f9, type: 2} - agentParameters: - agentCameras: [] - agentRenderTextures: [] - maxStep: 0 - resetOnDone: 1 - onDemandDecision: 0 - numberOfActionsBetweenDecisions: 1 - area: {fileID: 1148463453037344} - turnSpeed: 300 - moveSpeed: 2 - normalMaterial: {fileID: 2100000, guid: b0da1813c36914e678ba57f2790424e1, type: 2} - badMaterial: {fileID: 2100000, guid: 88b9ae7af2c1748a0a1f63407587a601, type: 2} - goodMaterial: {fileID: 2100000, guid: c67450f290f3e4897bc40276a619e78d, type: 2} - 
frozenMaterial: {fileID: 2100000, guid: 66163cf35956a4be08e801b750c26f33, type: 2} - myLaser: {fileID: 1717920238940982} - contribute: 1 - useVectorObs: 1 ---- !u!114 &114090124869684900 -MonoBehaviour: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1969596566606862} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: bb172294dbbcc408286b156a2c4b553c, type: 3} - m_Name: - m_EditorClassIdentifier: ---- !u!114 &114139780013492282 -MonoBehaviour: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1909382423022668} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: bb172294dbbcc408286b156a2c4b553c, type: 3} - m_Name: - m_EditorClassIdentifier: ---- !u!114 &114249416559197368 -MonoBehaviour: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1969596566606862} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: c66e6845309d241c78a6d77ee2567928, type: 3} - m_Name: - m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: 9e7865ec29c894c2d8c1617b0fa392f9, type: 2} - agentParameters: - agentCameras: [] - agentRenderTextures: [] - maxStep: 0 - resetOnDone: 1 - onDemandDecision: 0 - numberOfActionsBetweenDecisions: 1 - area: {fileID: 1148463453037344} - turnSpeed: 300 - moveSpeed: 2 - normalMaterial: {fileID: 2100000, guid: b0da1813c36914e678ba57f2790424e1, type: 2} - badMaterial: {fileID: 2100000, guid: 88b9ae7af2c1748a0a1f63407587a601, type: 2} - goodMaterial: {fileID: 2100000, guid: c67450f290f3e4897bc40276a619e78d, type: 2} - frozenMaterial: {fileID: 2100000, guid: 66163cf35956a4be08e801b750c26f33, type: 2} - myLaser: {fileID: 1036718751643974} - contribute: 1 - useVectorObs: 1 ---- !u!114 &114260856195490218 -MonoBehaviour: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 
0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1083644460692486} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: c66e6845309d241c78a6d77ee2567928, type: 3} - m_Name: - m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: 9e7865ec29c894c2d8c1617b0fa392f9, type: 2} - agentParameters: - agentCameras: [] - agentRenderTextures: [] - maxStep: 0 - resetOnDone: 1 - onDemandDecision: 0 - numberOfActionsBetweenDecisions: 1 - area: {fileID: 1148463453037344} - turnSpeed: 300 - moveSpeed: 2 - normalMaterial: {fileID: 2100000, guid: b0da1813c36914e678ba57f2790424e1, type: 2} - badMaterial: {fileID: 2100000, guid: 88b9ae7af2c1748a0a1f63407587a601, type: 2} - goodMaterial: {fileID: 2100000, guid: c67450f290f3e4897bc40276a619e78d, type: 2} - frozenMaterial: {fileID: 2100000, guid: 66163cf35956a4be08e801b750c26f33, type: 2} - myLaser: {fileID: 1035806283124670} - contribute: 1 - useVectorObs: 1 ---- !u!114 &114516767535109608 -MonoBehaviour: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1083644460692486} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: bb172294dbbcc408286b156a2c4b553c, type: 3} - m_Name: - m_EditorClassIdentifier: ---- !u!114 &114569279624194478 -MonoBehaviour: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1817700244901568} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: bb172294dbbcc408286b156a2c4b553c, type: 3} - m_Name: - m_EditorClassIdentifier: ---- !u!114 &114615827905664130 -MonoBehaviour: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1817700244901568} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: c66e6845309d241c78a6d77ee2567928, type: 3} - m_Name: - m_EditorClassIdentifier: - brain: {fileID: 
11400000, guid: dff7429d656234fed84c4fac2a7a683c, type: 2} - agentParameters: - agentCameras: [] - agentRenderTextures: [] - maxStep: 0 - resetOnDone: 1 - onDemandDecision: 0 - numberOfActionsBetweenDecisions: 1 - area: {fileID: 1148463453037344} - turnSpeed: 300 - moveSpeed: 2 - normalMaterial: {fileID: 2100000, guid: c9fa44c2c3f8ce74ca39a3355ea42631, type: 2} - badMaterial: {fileID: 2100000, guid: 88b9ae7af2c1748a0a1f63407587a601, type: 2} - goodMaterial: {fileID: 2100000, guid: c67450f290f3e4897bc40276a619e78d, type: 2} - frozenMaterial: {fileID: 2100000, guid: 66163cf35956a4be08e801b750c26f33, type: 2} - myLaser: {fileID: 1908911473514362} - contribute: 0 - useVectorObs: 1 ---- !u!114 &114688308485477898 -MonoBehaviour: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1472886453686564} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: bb172294dbbcc408286b156a2c4b553c, type: 3} - m_Name: - m_EditorClassIdentifier: diff --git a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Prefabs/FoodCollectorTeachingArea.prefab.meta b/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Prefabs/FoodCollectorTeachingArea.prefab.meta deleted file mode 100644 index fbaf35a0be..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Prefabs/FoodCollectorTeachingArea.prefab.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 4e556f5e95e27473da078d43fcea9c54 -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 100100000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Prefabs/VisualFoodCollectorArea.prefab b/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Prefabs/VisualFoodCollectorArea.prefab index 262370c07a..7650d93591 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Prefabs/VisualFoodCollectorArea.prefab +++ 
b/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Prefabs/VisualFoodCollectorArea.prefab @@ -9,171 +9,176 @@ Prefab: m_Modifications: [] m_RemovedComponents: [] m_ParentPrefab: {fileID: 0} - m_RootGameObject: {fileID: 1373903370712748} + m_RootGameObject: {fileID: 1145096862361766} m_IsPrefabParent: 1 ---- !u!1 &1006947046326398 +--- !u!1 &1014600383792522 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4705215304218418} - - component: {fileID: 33021456786405022} - - component: {fileID: 65533246245578526} - - component: {fileID: 23492609137453340} + - component: {fileID: 4894325195052674} + - component: {fileID: 33398962914955792} + - component: {fileID: 23417508503027462} m_Layer: 0 - m_Name: Cube (7) + m_Name: eye m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1020706016898032 +--- !u!1 &1078242976093628 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4108818069486140} - - component: {fileID: 20890573870796418} + - component: {fileID: 4600586535679462} + - component: {fileID: 33311110033190690} + - component: {fileID: 23553176880752320} m_Layer: 0 - m_Name: AgentCamera + m_Name: mouth m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1063860219685290 +--- !u!1 &1079907389665764 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4273778626365262} - - component: {fileID: 20184829288622090} + - component: {fileID: 4175232870171330} + - component: {fileID: 33834256339971282} + - component: {fileID: 23493511592802438} + - component: {fileID: 65589377080510240} m_Layer: 0 - m_Name: AgentCamera - m_TagString: 
Untagged + m_Name: Floor + m_TagString: ground m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1071481650937550 +--- !u!1 &1121462989228714 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4281205317849046} + - component: {fileID: 4949994601996280} + - component: {fileID: 33639571011425304} + - component: {fileID: 65918024835619606} + - component: {fileID: 23940763941071586} m_Layer: 0 - m_Name: Laser + m_Name: Cube m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1113464549287688 +--- !u!1 &1122318382191954 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4822141438928022} - - component: {fileID: 33592911429918398} - - component: {fileID: 23610093730846242} + - component: {fileID: 4125811064954314} + - component: {fileID: 33064880745139274} + - component: {fileID: 23593994518387712} m_Layer: 0 - m_Name: eye + m_Name: Headband m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1154613458410576 +--- !u!1 &1135489216937854 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4797041758268616} - - component: {fileID: 33740040246150820} - - component: {fileID: 65393336763004480} - - component: {fileID: 23385913893104254} + - component: {fileID: 4146134314971664} + - component: {fileID: 33007201079611444} + - component: {fileID: 23187340099814854} m_Layer: 0 - m_Name: Cube + m_Name: eye m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1169383371125934 +--- !u!1 &1145096862361766 GameObject: m_ObjectHideFlags: 0 
m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4200764861126498} - - component: {fileID: 65932430430952094} - - component: {fileID: 54338553804475180} - - component: {fileID: 114080992426104682} + - component: {fileID: 4307641258646068} + - component: {fileID: 114422602055470240} m_Layer: 0 - m_Name: Agent (3) - m_TagString: agent + m_Name: VisualFoodCollectorArea + m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1225831640572844 +--- !u!1 &1165679820726490 GameObject: - m_ObjectHideFlags: 1 + m_ObjectHideFlags: 0 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4502740022376566} - - component: {fileID: 33230664802658248} - - component: {fileID: 23134278246637528} + - component: {fileID: 4755605325663294} + - component: {fileID: 65589463239440526} + - component: {fileID: 54289954206234004} + - component: {fileID: 114724832030926412} + - component: {fileID: 114380897261200276} + - component: {fileID: 114326390494230518} m_Layer: 0 - m_Name: AgentCubeWithCamera_Blue - m_TagString: Untagged + m_Name: Agent (3) + m_TagString: agent m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1295027271656972 +--- !u!1 &1179319070824364 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4494755947957094} - - component: {fileID: 33555959561751530} - - component: {fileID: 23339485608791352} + - component: {fileID: 4607515801962882} + - component: {fileID: 33656934514913342} + - component: {fileID: 23075609872029122} m_Layer: 0 - m_Name: eye + m_Name: Headband m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1311950243820864 +--- !u!1 &1193448108562946 
GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4667478165396870} - - component: {fileID: 33141703365320568} - - component: {fileID: 65220200117116584} - - component: {fileID: 23720538892684934} + - component: {fileID: 4301723466142562} + - component: {fileID: 33487151372684152} + - component: {fileID: 65682238052370530} + - component: {fileID: 23805066794631192} m_Layer: 0 m_Name: Cube (7) m_TagString: Untagged @@ -181,16 +186,16 @@ GameObject: m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1321332639796692 +--- !u!1 &1214419816843726 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4420934584777238} - - component: {fileID: 33715293034083534} - - component: {fileID: 23140602843340680} + - component: {fileID: 4904660447404230} + - component: {fileID: 33608516916290782} + - component: {fileID: 23451884464532582} m_Layer: 0 m_Name: eye m_TagString: Untagged @@ -198,134 +203,140 @@ GameObject: m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1335947341879056 +--- !u!1 &1225293932999958 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4844086258046424} + - component: {fileID: 4581276532766838} + - component: {fileID: 33173463307667788} + - component: {fileID: 23949188537607370} m_Layer: 0 - m_Name: Laser + m_Name: Headband m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1354074425749060 +--- !u!1 &1270392021614266 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4344874082698640} - - component: {fileID: 33443085460647932} - - 
component: {fileID: 23135679569986010} + - component: {fileID: 4881244176857118} + - component: {fileID: 33112832585894242} + - component: {fileID: 23018451675943198} m_Layer: 0 - m_Name: eye + m_Name: mouth m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1360490281124436 +--- !u!1 &1307818939507544 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4055603687853444} - - component: {fileID: 33066230622789280} - - component: {fileID: 23859266924290150} + - component: {fileID: 4812724525373012} m_Layer: 0 - m_Name: eye + m_Name: Laser m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1373903370712748 +--- !u!1 &1317136368302180 GameObject: m_ObjectHideFlags: 0 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4612263362188236} - - component: {fileID: 114552799170750468} + - component: {fileID: 4701006000910250} + - component: {fileID: 65739180902805812} + - component: {fileID: 54578239364845406} + - component: {fileID: 114942033465821694} + - component: {fileID: 114869844339180154} + - component: {fileID: 114429222608880102} m_Layer: 0 - m_Name: VisualFoodCollectorArea - m_TagString: Untagged + m_Name: Agent (1) + m_TagString: agent m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1395664386782538 +--- !u!1 &1353209702154624 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4472914162993568} - - component: {fileID: 33942393414274458} - - component: {fileID: 23053845074447322} + - component: {fileID: 4928645405444414} + - component: {fileID: 33433944005396330} + - component: {fileID: 65722116964617048} + - component: 
{fileID: 23285836828984498} m_Layer: 0 - m_Name: mouth + m_Name: Cube m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1405292418219056 +--- !u!1 &1358824403081850 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4044816681514718} - - component: {fileID: 33602396174560854} - - component: {fileID: 23131301383334322} + - component: {fileID: 4351896595866570} + - component: {fileID: 33220378165577016} + - component: {fileID: 23183609615186614} m_Layer: 0 - m_Name: Headband + m_Name: eye m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1429079117656474 +--- !u!1 &1363680577697298 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4906730824288932} - - component: {fileID: 33608906352258716} - - component: {fileID: 65110456292660578} - - component: {fileID: 23199089093492790} + - component: {fileID: 4555536896909124} + - component: {fileID: 33437997913177172} + - component: {fileID: 23618215726505526} m_Layer: 0 - m_Name: Cube + m_Name: eye m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1443673828307814 +--- !u!1 &1373801553976666 GameObject: m_ObjectHideFlags: 0 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4363808360324110} - - component: {fileID: 65178766753981896} - - component: {fileID: 54088750604404324} - - component: {fileID: 114257846618384398} + - component: {fileID: 4875680222761558} + - component: {fileID: 65913236404058334} + - component: {fileID: 54260679202702464} + - component: {fileID: 114556471765155272} + - component: {fileID: 114484596947519388} + - component: {fileID: 
114036270357198286} m_Layer: 0 m_Name: Agent m_TagString: agent @@ -333,186 +344,181 @@ GameObject: m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1449550305145294 +--- !u!1 &1399553220224106 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4647279353448348} - - component: {fileID: 33311378397245648} - - component: {fileID: 23936720454023906} + - component: {fileID: 4340505092436632} + - component: {fileID: 33104902330510722} + - component: {fileID: 23861339133111532} m_Layer: 0 - m_Name: Headband + m_Name: mouth m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1488027870493332 +--- !u!1 &1420568803306334 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4070809751385952} - - component: {fileID: 33331636073791150} - - component: {fileID: 23650098059578888} - - component: {fileID: 65059201626075844} + - component: {fileID: 4662781185182020} + - component: {fileID: 33985812240333934} + - component: {fileID: 23229943995234896} m_Layer: 0 - m_Name: Floor - m_TagString: ground + m_Name: mouth + m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1509336059309658 +--- !u!1 &1445621402098048 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4121151068804744} - - component: {fileID: 33249232817837298} - - component: {fileID: 65588617864146588} - - component: {fileID: 23853077636474208} + - component: {fileID: 4322704766028316} + - component: {fileID: 33618847153425232} + - component: {fileID: 23842657651682012} m_Layer: 0 - m_Name: Cube (7) + m_Name: AgentCubeWithCamera_Blue m_TagString: Untagged m_Icon: 
{fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1580603984718180 +--- !u!1 &1446822082845914 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4955619310742228} + - component: {fileID: 4257515355332200} + - component: {fileID: 33845612629305896} + - component: {fileID: 65357436224164412} + - component: {fileID: 23328732370008150} m_Layer: 0 - m_Name: Laser + m_Name: Cube (7) m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1606019544489478 +--- !u!1 &1494700324769970 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4784134879506728} - - component: {fileID: 33737671320950324} - - component: {fileID: 23371983477781584} + - component: {fileID: 4172060812915152} + - component: {fileID: 20380145723616022} m_Layer: 0 - m_Name: AgentCubeWithCamera_Blue + m_Name: AgentCamera m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1612432655468506 +--- !u!1 &1501061473632398 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4247901987570806} - - component: {fileID: 33822080881909152} - - component: {fileID: 65901695084016898} - - component: {fileID: 23953636389523694} + - component: {fileID: 4293457799811454} + - component: {fileID: 20696931947702132} m_Layer: 0 - m_Name: Cube + m_Name: AgentCamera m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1637229412367504 +--- !u!1 &1558211054879664 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: 
{fileID: 4211128169915068} - - component: {fileID: 33851667657955130} - - component: {fileID: 23670615737810094} + - component: {fileID: 4424041022055208} + - component: {fileID: 33497420207122002} + - component: {fileID: 23710656781028902} m_Layer: 0 - m_Name: mouth + m_Name: eye m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1644033386471554 +--- !u!1 &1598006908151838 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4933466107303308} - - component: {fileID: 33937136877863552} - - component: {fileID: 23446453883495642} + - component: {fileID: 4254909729027978} + - component: {fileID: 20863703825242712} m_Layer: 0 - m_Name: AgentCubeWithCamera_Blue + m_Name: AgentCamera m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1645871539393100 +--- !u!1 &1605878135356198 GameObject: - m_ObjectHideFlags: 1 + m_ObjectHideFlags: 0 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4975251187012474} + - component: {fileID: 4470879573182666} m_Layer: 0 - m_Name: Laser - m_TagString: Untagged + m_Name: Court + m_TagString: wall m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1661627863960872 +--- !u!1 &1644999083152214 GameObject: - m_ObjectHideFlags: 0 + m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4058814921636558} - - component: {fileID: 65441205843344002} - - component: {fileID: 54753735395303746} - - component: {fileID: 114625025344622832} + - component: {fileID: 4048009962245794} + - component: {fileID: 20363738094913048} m_Layer: 0 - m_Name: Agent (1) - m_TagString: agent + m_Name: AgentCamera + m_TagString: Untagged 
m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1736278560078538 +--- !u!1 &1661059072361910 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4626520968249264} - - component: {fileID: 33891928207286534} - - component: {fileID: 23503300393546632} + - component: {fileID: 4340962090815832} + - component: {fileID: 33251900124875064} + - component: {fileID: 23381564833934054} m_Layer: 0 m_Name: Headband m_TagString: Untagged @@ -520,865 +526,867 @@ GameObject: m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1784936519446344 +--- !u!1 &1669654006840182 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4462781027002900} - - component: {fileID: 33938174309896050} - - component: {fileID: 65744343711438542} - - component: {fileID: 23131443350435860} + - component: {fileID: 4573279692837578} + - component: {fileID: 33007244417731778} + - component: {fileID: 23763015239766136} m_Layer: 0 - m_Name: Cube + m_Name: eye m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1832473509294508 +--- !u!1 &1681006282972324 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4849354229057266} - - component: {fileID: 33375700058842592} - - component: {fileID: 23448090049970378} + - component: {fileID: 4300712180700360} + - component: {fileID: 33706362889224244} + - component: {fileID: 23348621273683864} m_Layer: 0 - m_Name: Headband + m_Name: eye m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1865539321937198 +--- !u!1 &1682998971059402 GameObject: - m_ObjectHideFlags: 0 + 
m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4294807322863768} + - component: {fileID: 4770596166774728} + - component: {fileID: 33437662427228988} + - component: {fileID: 65949213028989648} + - component: {fileID: 23249721641656990} m_Layer: 0 - m_Name: Court - m_TagString: wall + m_Name: Cube + m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1872164461544240 +--- !u!1 &1715767492872126 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4290045604855054} - - component: {fileID: 33597693146101122} - - component: {fileID: 23174089715016186} + - component: {fileID: 4151762782664136} + - component: {fileID: 33795060429981700} + - component: {fileID: 23746622875505272} m_Layer: 0 - m_Name: mouth + m_Name: AgentCubeWithCamera_Blue m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1901325889748826 +--- !u!1 &1725685797327414 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4647732712799360} - - component: {fileID: 33355207304875418} - - component: {fileID: 23152468287192832} + - component: {fileID: 4352047625778706} + - component: {fileID: 33441042292940994} + - component: {fileID: 23477035516712264} m_Layer: 0 - m_Name: eye + m_Name: AgentCubeWithCamera_Blue m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1902315786566738 +--- !u!1 &1779831409734062 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4103156959565466} - - component: {fileID: 33407893211590960} 
- - component: {fileID: 23360178695717674} + - component: {fileID: 4393500247777624} m_Layer: 0 - m_Name: AgentCubeWithCamera_Blue + m_Name: Laser m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1916152952954816 +--- !u!1 &1841305616332738 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4850267975317880} - - component: {fileID: 33923970477956536} - - component: {fileID: 23640165886292070} + - component: {fileID: 4210008090221700} + - component: {fileID: 33994336694509784} + - component: {fileID: 23937488998945520} m_Layer: 0 - m_Name: eye + m_Name: AgentCubeWithCamera_Blue m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1922169902050698 +--- !u!1 &1873124424911068 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4426394162365402} - - component: {fileID: 33728199568902842} - - component: {fileID: 23897348706436050} - - component: {fileID: 64906907870115852} + - component: {fileID: 4232991942739224} + - component: {fileID: 33657416383297052} + - component: {fileID: 65738389332670280} + - component: {fileID: 23431158324231992} m_Layer: 0 - m_Name: WallsOuter - m_TagString: wall + m_Name: Cube + m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1938841407205532 +--- !u!1 &1898252046043334 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4309604379810914} - - component: {fileID: 33777054715953836} - - component: {fileID: 65285650247585738} - - component: {fileID: 23784087430264244} + - component: {fileID: 4198767113946998} m_Layer: 0 - m_Name: Cube (7) + 
m_Name: Laser m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1948317836081598 +--- !u!1 &1900094563283840 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4382031080713888} - - component: {fileID: 33628516318414646} - - component: {fileID: 23440411431868422} + - component: {fileID: 4083604007501892} m_Layer: 0 - m_Name: eye + m_Name: Laser m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1951009134417952 +--- !u!1 &1907470464862992 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4031837848228944} - - component: {fileID: 20825339371991526} + - component: {fileID: 4888051724348932} + - component: {fileID: 33291922297337334} + - component: {fileID: 65057825353511568} + - component: {fileID: 23615217600355662} m_Layer: 0 - m_Name: AgentCamera + m_Name: Cube (7) m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1964761937961574 +--- !u!1 &1930612303318000 GameObject: - m_ObjectHideFlags: 0 + m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4273593104376064} - - component: {fileID: 65540878552782854} - - component: {fileID: 54626777054134360} - - component: {fileID: 114855044884614820} + - component: {fileID: 4981707683050382} + - component: {fileID: 33345590301219576} + - component: {fileID: 65015492267237124} + - component: {fileID: 23755052841482864} m_Layer: 0 - m_Name: Agent (2) - m_TagString: agent + m_Name: Cube (7) + m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1972265198832878 +--- !u!1 
&1939112378710628 GameObject: - m_ObjectHideFlags: 1 + m_ObjectHideFlags: 0 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4090277716571272} - - component: {fileID: 33930320050166542} - - component: {fileID: 23220910664895598} + - component: {fileID: 4796822391050334} + - component: {fileID: 65814032642346288} + - component: {fileID: 54017759129031336} + - component: {fileID: 114799611184247278} + - component: {fileID: 114729119221978826} + - component: {fileID: 114322691115031348} m_Layer: 0 - m_Name: mouth - m_TagString: Untagged + m_Name: Agent (2) + m_TagString: agent m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1976543580957312 +--- !u!1 &1971119195936814 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4817299629019676} - - component: {fileID: 20416796192002010} + - component: {fileID: 4980287366775932} + - component: {fileID: 33886886862843664} + - component: {fileID: 23021469306627962} + - component: {fileID: 64888510637481462} m_Layer: 0 - m_Name: AgentCamera - m_TagString: Untagged + m_Name: WallsOuter + m_TagString: wall m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!4 &4031837848228944 +--- !u!4 &4048009962245794 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1951009134417952} + m_GameObject: {fileID: 1644999083152214} m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} m_LocalPosition: {x: 0, y: 0, z: 0.15} m_LocalScale: {x: 1, y: 1, z: 1} m_Children: [] - m_Father: {fileID: 4502740022376566} + m_Father: {fileID: 4151762782664136} m_RootOrder: 0 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4044816681514718 +--- !u!4 &4083604007501892 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} 
m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1405292418219056} + m_GameObject: {fileID: 1900094563283840} + m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} + m_LocalPosition: {x: 0, y: 0, z: 0} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 4928645405444414} + - {fileID: 4301723466142562} + m_Father: {fileID: 4755605325663294} + m_RootOrder: 1 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!4 &4125811064954314 +Transform: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1122318382191954} m_LocalRotation: {x: -0, y: -0, z: 0.016506119, w: 0.9998638} m_LocalPosition: {x: 0, y: 0.341, z: 0} m_LocalScale: {x: 1.0441425, y: 0.19278127, z: 1.0441422} m_Children: [] - m_Father: {fileID: 4502740022376566} + m_Father: {fileID: 4210008090221700} m_RootOrder: 4 m_LocalEulerAnglesHint: {x: 0, y: -179.99998, z: 1.8920001} ---- !u!4 &4055603687853444 +--- !u!4 &4146134314971664 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1360490281124436} + m_GameObject: {fileID: 1135489216937854} m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} - m_LocalPosition: {x: -0.29999995, y: 0.07399994, z: 0.50040054} + m_LocalPosition: {x: 0.29999995, y: 0.07399994, z: 0.50040054} m_LocalScale: {x: 0.29457998, y: 0.29457998, z: 0.29457998} m_Children: [] - m_Father: {fileID: 4502740022376566} - m_RootOrder: 2 + m_Father: {fileID: 4322704766028316} + m_RootOrder: 1 m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4058814921636558 +--- !u!4 &4151762782664136 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1661627863960872} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: -30.56, y: 2, z: -9.559753} + m_GameObject: {fileID: 1715767492872126} + m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} + m_LocalPosition: {x: 0, y: 
0, z: 0} m_LocalScale: {x: 1, y: 1, z: 1} m_Children: - - {fileID: 4103156959565466} - - {fileID: 4844086258046424} - m_Father: {fileID: 4612263362188236} - m_RootOrder: 2 + - {fileID: 4048009962245794} + - {fileID: 4300712180700360} + - {fileID: 4904660447404230} + - {fileID: 4600586535679462} + - {fileID: 4607515801962882} + m_Father: {fileID: 4796822391050334} + m_RootOrder: 0 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4070809751385952 +--- !u!4 &4172060812915152 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1488027870493332} + m_GameObject: {fileID: 1494700324769970} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 0, y: 0, z: 0.15} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: [] + m_Father: {fileID: 4210008090221700} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!4 &4175232870171330 +Transform: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1079907389665764} m_LocalRotation: {x: 0, y: -0, z: -0, w: 1} m_LocalPosition: {x: -0, y: 0, z: 0} m_LocalScale: {x: 0.01, y: 0.01, z: 0.01} m_Children: [] - m_Father: {fileID: 4294807322863768} + m_Father: {fileID: 4470879573182666} m_RootOrder: 0 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4090277716571272 +--- !u!4 &4198767113946998 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1972265198832878} - m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} - m_LocalPosition: {x: 0, y: -0.18299997, z: 0.50040054} - m_LocalScale: {x: 0.27602, y: 0.042489994, z: 0.13891} - m_Children: [] - m_Father: {fileID: 4103156959565466} - m_RootOrder: 3 - m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4103156959565466 + m_GameObject: {fileID: 1898252046043334} + m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} + m_LocalPosition: {x: 0, y: 0, 
z: 0} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 4232991942739224} + - {fileID: 4257515355332200} + m_Father: {fileID: 4875680222761558} + m_RootOrder: 1 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!4 &4210008090221700 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1902315786566738} + m_GameObject: {fileID: 1841305616332738} m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} m_LocalPosition: {x: 0, y: 0, z: 0} m_LocalScale: {x: 1, y: 1, z: 1} m_Children: - - {fileID: 4273778626365262} - - {fileID: 4344874082698640} - - {fileID: 4850267975317880} - - {fileID: 4090277716571272} - - {fileID: 4626520968249264} - m_Father: {fileID: 4058814921636558} + - {fileID: 4172060812915152} + - {fileID: 4424041022055208} + - {fileID: 4555536896909124} + - {fileID: 4340505092436632} + - {fileID: 4125811064954314} + m_Father: {fileID: 4755605325663294} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!4 &4232991942739224 +Transform: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1873124424911068} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 0, y: -0.060000002, z: 10} + m_LocalScale: {x: 0.35, y: 0.13, z: 20} + m_Children: [] + m_Father: {fileID: 4198767113946998} m_RootOrder: 0 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4108818069486140 +--- !u!4 &4254909729027978 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1020706016898032} + m_GameObject: {fileID: 1598006908151838} m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} m_LocalPosition: {x: 0, y: 0, z: 0.15} m_LocalScale: {x: 1, y: 1, z: 1} m_Children: [] - m_Father: {fileID: 4933466107303308} + m_Father: {fileID: 4322704766028316} m_RootOrder: 0 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4121151068804744 +--- !u!4 
&4257515355332200 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1509336059309658} + m_GameObject: {fileID: 1446822082845914} m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} m_LocalPosition: {x: 0, y: -0.060000002, z: 10} m_LocalScale: {x: 0.77, y: 0.26, z: 20} m_Children: [] - m_Father: {fileID: 4955619310742228} + m_Father: {fileID: 4198767113946998} m_RootOrder: 1 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4200764861126498 +--- !u!4 &4293457799811454 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1169383371125934} + m_GameObject: {fileID: 1501061473632398} m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: -30.56, y: 2, z: -9.559753} + m_LocalPosition: {x: 0, y: 0, z: 0.15} m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 4502740022376566} - - {fileID: 4281205317849046} - m_Father: {fileID: 4612263362188236} - m_RootOrder: 4 + m_Children: [] + m_Father: {fileID: 4352047625778706} + m_RootOrder: 0 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4211128169915068 +--- !u!4 &4300712180700360 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1637229412367504} + m_GameObject: {fileID: 1681006282972324} m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} - m_LocalPosition: {x: 0, y: -0.18299997, z: 0.50040054} - m_LocalScale: {x: 0.27602, y: 0.042489994, z: 0.13891} + m_LocalPosition: {x: 0.29999995, y: 0.07399994, z: 0.50040054} + m_LocalScale: {x: 0.29457998, y: 0.29457998, z: 0.29457998} m_Children: [] - m_Father: {fileID: 4784134879506728} - m_RootOrder: 3 + m_Father: {fileID: 4151762782664136} + m_RootOrder: 1 m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4247901987570806 +--- !u!4 &4301723466142562 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: 
{fileID: 100100000} - m_GameObject: {fileID: 1612432655468506} + m_GameObject: {fileID: 1193448108562946} m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} m_LocalPosition: {x: 0, y: -0.060000002, z: 10} - m_LocalScale: {x: 0.35, y: 0.13, z: 20} + m_LocalScale: {x: 0.77, y: 0.26, z: 20} m_Children: [] - m_Father: {fileID: 4844086258046424} - m_RootOrder: 0 + m_Father: {fileID: 4083604007501892} + m_RootOrder: 1 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4273593104376064 +--- !u!4 &4307641258646068 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1964761937961574} + m_GameObject: {fileID: 1145096862361766} m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: -30.56, y: 2, z: -9.559753} + m_LocalPosition: {x: 0, y: 0, z: 0} m_LocalScale: {x: 1, y: 1, z: 1} m_Children: - - {fileID: 4784134879506728} - - {fileID: 4975251187012474} - m_Father: {fileID: 4612263362188236} - m_RootOrder: 3 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4273778626365262 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1063860219685290} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0.15} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 4103156959565466} + - {fileID: 4470879573182666} + - {fileID: 4875680222761558} + - {fileID: 4701006000910250} + - {fileID: 4796822391050334} + - {fileID: 4755605325663294} + m_Father: {fileID: 0} m_RootOrder: 0 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4281205317849046 +--- !u!4 &4322704766028316 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1071481650937550} + m_GameObject: {fileID: 1445621402098048} m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} m_LocalPosition: {x: 0, y: 0, z: 0} m_LocalScale: {x: 1, y: 1, z: 1} 
m_Children: - - {fileID: 4462781027002900} - - {fileID: 4667478165396870} - m_Father: {fileID: 4200764861126498} - m_RootOrder: 1 + - {fileID: 4254909729027978} + - {fileID: 4146134314971664} + - {fileID: 4351896595866570} + - {fileID: 4881244176857118} + - {fileID: 4340962090815832} + m_Father: {fileID: 4701006000910250} + m_RootOrder: 0 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4290045604855054 +--- !u!4 &4340505092436632 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1872164461544240} + m_GameObject: {fileID: 1399553220224106} m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} m_LocalPosition: {x: 0, y: -0.18299997, z: 0.50040054} m_LocalScale: {x: 0.27602, y: 0.042489994, z: 0.13891} m_Children: [] - m_Father: {fileID: 4933466107303308} + m_Father: {fileID: 4210008090221700} m_RootOrder: 3 m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4294807322863768 +--- !u!4 &4340962090815832 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1865539321937198} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 0.5, z: 0} + m_GameObject: {fileID: 1661059072361910} + m_LocalRotation: {x: -0, y: -0, z: 0.016506119, w: 0.9998638} + m_LocalPosition: {x: 0, y: 0.341, z: 0} + m_LocalScale: {x: 1.0441425, y: 0.19278127, z: 1.0441422} + m_Children: [] + m_Father: {fileID: 4322704766028316} + m_RootOrder: 4 + m_LocalEulerAnglesHint: {x: 0, y: -179.99998, z: 1.8920001} +--- !u!4 &4351896595866570 +Transform: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1358824403081850} + m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} + m_LocalPosition: {x: -0.29999995, y: 0.07399994, z: 0.50040054} + m_LocalScale: {x: 0.29457998, y: 0.29457998, z: 0.29457998} + m_Children: [] + m_Father: {fileID: 4322704766028316} + m_RootOrder: 2 
+ m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} +--- !u!4 &4352047625778706 +Transform: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1725685797327414} + m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} + m_LocalPosition: {x: 0, y: 0, z: 0} m_LocalScale: {x: 1, y: 1, z: 1} m_Children: - - {fileID: 4070809751385952} - - {fileID: 4426394162365402} - m_Father: {fileID: 4612263362188236} + - {fileID: 4293457799811454} + - {fileID: 4894325195052674} + - {fileID: 4573279692837578} + - {fileID: 4662781185182020} + - {fileID: 4581276532766838} + m_Father: {fileID: 4875680222761558} m_RootOrder: 0 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4309604379810914 +--- !u!4 &4393500247777624 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1938841407205532} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: -0.060000002, z: 10} - m_LocalScale: {x: 0.77, y: 0.26, z: 20} - m_Children: [] - m_Father: {fileID: 4975251187012474} + m_GameObject: {fileID: 1779831409734062} + m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} + m_LocalPosition: {x: 0, y: 0, z: 0} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 4949994601996280} + - {fileID: 4981707683050382} + m_Father: {fileID: 4796822391050334} m_RootOrder: 1 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4344874082698640 +--- !u!4 &4424041022055208 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1354074425749060} + m_GameObject: {fileID: 1558211054879664} m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} m_LocalPosition: {x: 0.29999995, y: 0.07399994, z: 0.50040054} m_LocalScale: {x: 0.29457998, y: 0.29457998, z: 0.29457998} m_Children: [] - m_Father: {fileID: 4103156959565466} + m_Father: {fileID: 4210008090221700} m_RootOrder: 1 m_LocalEulerAnglesHint: {x: 0, y: 180, 
z: 0} ---- !u!4 &4363808360324110 +--- !u!4 &4470879573182666 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1443673828307814} + m_GameObject: {fileID: 1605878135356198} m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: -30.56, y: 2, z: -9.559753} + m_LocalPosition: {x: 0, y: 0.5, z: 0} m_LocalScale: {x: 1, y: 1, z: 1} m_Children: - - {fileID: 4933466107303308} - - {fileID: 4955619310742228} - m_Father: {fileID: 4612263362188236} - m_RootOrder: 1 + - {fileID: 4175232870171330} + - {fileID: 4980287366775932} + m_Father: {fileID: 4307641258646068} + m_RootOrder: 0 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4382031080713888 +--- !u!4 &4555536896909124 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1948317836081598} + m_GameObject: {fileID: 1363680577697298} m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} m_LocalPosition: {x: -0.29999995, y: 0.07399994, z: 0.50040054} m_LocalScale: {x: 0.29457998, y: 0.29457998, z: 0.29457998} m_Children: [] - m_Father: {fileID: 4933466107303308} + m_Father: {fileID: 4210008090221700} m_RootOrder: 2 m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4420934584777238 +--- !u!4 &4573279692837578 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1321332639796692} + m_GameObject: {fileID: 1669654006840182} m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} m_LocalPosition: {x: -0.29999995, y: 0.07399994, z: 0.50040054} m_LocalScale: {x: 0.29457998, y: 0.29457998, z: 0.29457998} m_Children: [] - m_Father: {fileID: 4784134879506728} + m_Father: {fileID: 4352047625778706} m_RootOrder: 2 m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4426394162365402 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - 
m_GameObject: {fileID: 1922169902050698} - m_LocalRotation: {x: 0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: -0, y: 0, z: 0} - m_LocalScale: {x: 0.01, y: 0.025, z: 0.01} - m_Children: [] - m_Father: {fileID: 4294807322863768} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4462781027002900 +--- !u!4 &4581276532766838 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1784936519446344} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: -0.060000002, z: 10} - m_LocalScale: {x: 0.35, y: 0.13, z: 20} + m_GameObject: {fileID: 1225293932999958} + m_LocalRotation: {x: -0, y: -0, z: 0.016506119, w: 0.9998638} + m_LocalPosition: {x: 0, y: 0.341, z: 0} + m_LocalScale: {x: 1.0441425, y: 0.19278127, z: 1.0441422} m_Children: [] - m_Father: {fileID: 4281205317849046} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4472914162993568 + m_Father: {fileID: 4352047625778706} + m_RootOrder: 4 + m_LocalEulerAnglesHint: {x: 0, y: -179.99998, z: 1.8920001} +--- !u!4 &4600586535679462 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1395664386782538} + m_GameObject: {fileID: 1078242976093628} m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} m_LocalPosition: {x: 0, y: -0.18299997, z: 0.50040054} m_LocalScale: {x: 0.27602, y: 0.042489994, z: 0.13891} m_Children: [] - m_Father: {fileID: 4502740022376566} + m_Father: {fileID: 4151762782664136} m_RootOrder: 3 m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4494755947957094 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1295027271656972} - m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} - m_LocalPosition: {x: 0.29999995, y: 0.07399994, z: 0.50040054} - m_LocalScale: {x: 0.29457998, y: 0.29457998, z: 0.29457998} - m_Children: [] 
- m_Father: {fileID: 4933466107303308} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4502740022376566 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1225831640572844} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 4031837848228944} - - {fileID: 4647732712799360} - - {fileID: 4055603687853444} - - {fileID: 4472914162993568} - - {fileID: 4044816681514718} - m_Father: {fileID: 4200764861126498} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4612263362188236 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1373903370712748} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 4294807322863768} - - {fileID: 4363808360324110} - - {fileID: 4058814921636558} - - {fileID: 4273593104376064} - - {fileID: 4200764861126498} - m_Father: {fileID: 0} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4626520968249264 +--- !u!4 &4607515801962882 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1736278560078538} + m_GameObject: {fileID: 1179319070824364} m_LocalRotation: {x: -0, y: -0, z: 0.016506119, w: 0.9998638} m_LocalPosition: {x: 0, y: 0.341, z: 0} m_LocalScale: {x: 1.0441425, y: 0.19278127, z: 1.0441422} m_Children: [] - m_Father: {fileID: 4103156959565466} + m_Father: {fileID: 4151762782664136} m_RootOrder: 4 m_LocalEulerAnglesHint: {x: 0, y: -179.99998, z: 1.8920001} ---- !u!4 &4647279353448348 +--- !u!4 &4662781185182020 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 
1449550305145294} - m_LocalRotation: {x: -0, y: -0, z: 0.016506119, w: 0.9998638} - m_LocalPosition: {x: 0, y: 0.341, z: 0} - m_LocalScale: {x: 1.0441425, y: 0.19278127, z: 1.0441422} + m_GameObject: {fileID: 1420568803306334} + m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} + m_LocalPosition: {x: 0, y: -0.18299997, z: 0.50040054} + m_LocalScale: {x: 0.27602, y: 0.042489994, z: 0.13891} m_Children: [] - m_Father: {fileID: 4784134879506728} - m_RootOrder: 4 - m_LocalEulerAnglesHint: {x: 0, y: -179.99998, z: 1.8920001} ---- !u!4 &4647732712799360 + m_Father: {fileID: 4352047625778706} + m_RootOrder: 3 + m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} +--- !u!4 &4701006000910250 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1901325889748826} - m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} - m_LocalPosition: {x: 0.29999995, y: 0.07399994, z: 0.50040054} - m_LocalScale: {x: 0.29457998, y: 0.29457998, z: 0.29457998} - m_Children: [] - m_Father: {fileID: 4502740022376566} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4667478165396870 + m_GameObject: {fileID: 1317136368302180} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: -30.56, y: 2, z: -9.559753} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 4322704766028316} + - {fileID: 4812724525373012} + m_Father: {fileID: 4307641258646068} + m_RootOrder: 2 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!4 &4755605325663294 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1311950243820864} + m_GameObject: {fileID: 1165679820726490} m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: -0.060000002, z: 10} - m_LocalScale: {x: 0.77, y: 0.26, z: 20} - m_Children: [] - m_Father: {fileID: 4281205317849046} - m_RootOrder: 1 + m_LocalPosition: {x: -30.56, y: 2, z: -9.559753} + m_LocalScale: {x: 
1, y: 1, z: 1} + m_Children: + - {fileID: 4210008090221700} + - {fileID: 4083604007501892} + m_Father: {fileID: 4307641258646068} + m_RootOrder: 4 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4705215304218418 +--- !u!4 &4770596166774728 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1006947046326398} + m_GameObject: {fileID: 1682998971059402} m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} m_LocalPosition: {x: 0, y: -0.060000002, z: 10} - m_LocalScale: {x: 0.77, y: 0.26, z: 20} + m_LocalScale: {x: 0.35, y: 0.13, z: 20} m_Children: [] - m_Father: {fileID: 4844086258046424} - m_RootOrder: 1 + m_Father: {fileID: 4812724525373012} + m_RootOrder: 0 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4784134879506728 +--- !u!4 &4796822391050334 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1606019544489478} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} + m_GameObject: {fileID: 1939112378710628} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: -30.56, y: 2, z: -9.559753} m_LocalScale: {x: 1, y: 1, z: 1} m_Children: - - {fileID: 4817299629019676} - - {fileID: 4822141438928022} - - {fileID: 4420934584777238} - - {fileID: 4211128169915068} - - {fileID: 4647279353448348} - m_Father: {fileID: 4273593104376064} - m_RootOrder: 0 + - {fileID: 4151762782664136} + - {fileID: 4393500247777624} + m_Father: {fileID: 4307641258646068} + m_RootOrder: 3 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4797041758268616 +--- !u!4 &4812724525373012 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1154613458410576} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: -0.060000002, z: 10} - m_LocalScale: {x: 0.35, y: 0.13, z: 20} - m_Children: [] - m_Father: 
{fileID: 4955619310742228} - m_RootOrder: 0 + m_GameObject: {fileID: 1307818939507544} + m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} + m_LocalPosition: {x: 0, y: 0, z: 0} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 4770596166774728} + - {fileID: 4888051724348932} + m_Father: {fileID: 4701006000910250} + m_RootOrder: 1 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4817299629019676 +--- !u!4 &4875680222761558 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1976543580957312} + m_GameObject: {fileID: 1373801553976666} m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0.15} + m_LocalPosition: {x: -30.56, y: 2, z: -9.559753} m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 4784134879506728} - m_RootOrder: 0 + m_Children: + - {fileID: 4352047625778706} + - {fileID: 4198767113946998} + m_Father: {fileID: 4307641258646068} + m_RootOrder: 1 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4822141438928022 +--- !u!4 &4881244176857118 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1113464549287688} + m_GameObject: {fileID: 1270392021614266} m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} - m_LocalPosition: {x: 0.29999995, y: 0.07399994, z: 0.50040054} - m_LocalScale: {x: 0.29457998, y: 0.29457998, z: 0.29457998} + m_LocalPosition: {x: 0, y: -0.18299997, z: 0.50040054} + m_LocalScale: {x: 0.27602, y: 0.042489994, z: 0.13891} m_Children: [] - m_Father: {fileID: 4784134879506728} - m_RootOrder: 1 + m_Father: {fileID: 4322704766028316} + m_RootOrder: 3 m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4844086258046424 +--- !u!4 &4888051724348932 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1335947341879056} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - 
m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 4247901987570806} - - {fileID: 4705215304218418} - m_Father: {fileID: 4058814921636558} + m_GameObject: {fileID: 1907470464862992} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 0, y: -0.060000002, z: 10} + m_LocalScale: {x: 0.77, y: 0.26, z: 20} + m_Children: [] + m_Father: {fileID: 4812724525373012} m_RootOrder: 1 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4849354229057266 +--- !u!4 &4894325195052674 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1832473509294508} - m_LocalRotation: {x: -0, y: -0, z: 0.016506119, w: 0.9998638} - m_LocalPosition: {x: 0, y: 0.341, z: 0} - m_LocalScale: {x: 1.0441425, y: 0.19278127, z: 1.0441422} + m_GameObject: {fileID: 1014600383792522} + m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} + m_LocalPosition: {x: 0.29999995, y: 0.07399994, z: 0.50040054} + m_LocalScale: {x: 0.29457998, y: 0.29457998, z: 0.29457998} m_Children: [] - m_Father: {fileID: 4933466107303308} - m_RootOrder: 4 - m_LocalEulerAnglesHint: {x: 0, y: -179.99998, z: 1.8920001} ---- !u!4 &4850267975317880 + m_Father: {fileID: 4352047625778706} + m_RootOrder: 1 + m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} +--- !u!4 &4904660447404230 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1916152952954816} + m_GameObject: {fileID: 1214419816843726} m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} m_LocalPosition: {x: -0.29999995, y: 0.07399994, z: 0.50040054} m_LocalScale: {x: 0.29457998, y: 0.29457998, z: 0.29457998} m_Children: [] - m_Father: {fileID: 4103156959565466} + m_Father: {fileID: 4151762782664136} m_RootOrder: 2 m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4906730824288932 +--- !u!4 &4928645405444414 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} 
m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1429079117656474} + m_GameObject: {fileID: 1353209702154624} m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} m_LocalPosition: {x: 0, y: -0.060000002, z: 10} m_LocalScale: {x: 0.35, y: 0.13, z: 20} m_Children: [] - m_Father: {fileID: 4975251187012474} + m_Father: {fileID: 4083604007501892} m_RootOrder: 0 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4933466107303308 +--- !u!4 &4949994601996280 Transform: m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1644033386471554} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 4108818069486140} - - {fileID: 4494755947957094} - - {fileID: 4382031080713888} - - {fileID: 4290045604855054} - - {fileID: 4849354229057266} - m_Father: {fileID: 4363808360324110} + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1121462989228714} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 0, y: -0.060000002, z: 10} + m_LocalScale: {x: 0.35, y: 0.13, z: 20} + m_Children: [] + m_Father: {fileID: 4393500247777624} m_RootOrder: 0 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4955619310742228 +--- !u!4 &4980287366775932 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1580603984718180} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 4797041758268616} - - {fileID: 4121151068804744} - m_Father: {fileID: 4363808360324110} + m_GameObject: {fileID: 1971119195936814} + m_LocalRotation: {x: 0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: -0, y: 0, z: 0} + m_LocalScale: {x: 0.01, y: 0.025, z: 0.01} + m_Children: [] + m_Father: {fileID: 4470879573182666} m_RootOrder: 1 
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4975251187012474 +--- !u!4 &4981707683050382 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1645871539393100} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 4906730824288932} - - {fileID: 4309604379810914} - m_Father: {fileID: 4273593104376064} + m_GameObject: {fileID: 1930612303318000} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 0, y: -0.060000002, z: 10} + m_LocalScale: {x: 0.77, y: 0.26, z: 20} + m_Children: [] + m_Father: {fileID: 4393500247777624} m_RootOrder: 1 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!20 &20184829288622090 +--- !u!20 &20363738094913048 Camera: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1063860219685290} + m_GameObject: {fileID: 1644999083152214} m_Enabled: 1 serializedVersion: 2 m_ClearFlags: 2 @@ -1409,12 +1417,12 @@ Camera: m_OcclusionCulling: 1 m_StereoConvergence: 10 m_StereoSeparation: 0.022 ---- !u!20 &20416796192002010 +--- !u!20 &20380145723616022 Camera: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1976543580957312} + m_GameObject: {fileID: 1494700324769970} m_Enabled: 1 serializedVersion: 2 m_ClearFlags: 2 @@ -1445,12 +1453,12 @@ Camera: m_OcclusionCulling: 1 m_StereoConvergence: 10 m_StereoSeparation: 0.022 ---- !u!20 &20825339371991526 +--- !u!20 &20696931947702132 Camera: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1951009134417952} + m_GameObject: {fileID: 1501061473632398} m_Enabled: 1 serializedVersion: 2 m_ClearFlags: 2 @@ -1481,12 +1489,12 @@ Camera: m_OcclusionCulling: 1 m_StereoConvergence: 10 m_StereoSeparation: 0.022 ---- !u!20 
&20890573870796418 +--- !u!20 &20863703825242712 Camera: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1020706016898032} + m_GameObject: {fileID: 1598006908151838} m_Enabled: 1 serializedVersion: 2 m_ClearFlags: 2 @@ -1517,12 +1525,12 @@ Camera: m_OcclusionCulling: 1 m_StereoConvergence: 10 m_StereoSeparation: 0.022 ---- !u!23 &23053845074447322 +--- !u!23 &23018451675943198 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1395664386782538} + m_GameObject: {fileID: 1270392021614266} m_Enabled: 1 m_CastShadows: 1 m_ReceiveShadows: 1 @@ -1551,12 +1559,12 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!23 &23131301383334322 +--- !u!23 &23021469306627962 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1405292418219056} + m_GameObject: {fileID: 1971119195936814} m_Enabled: 1 m_CastShadows: 1 m_ReceiveShadows: 1 @@ -1565,7 +1573,7 @@ MeshRenderer: m_LightProbeUsage: 1 m_ReflectionProbeUsage: 1 m_Materials: - - {fileID: 2100000, guid: 04be259c590de46f69db4cbd1da877d5, type: 2} + - {fileID: 2100000, guid: 66163cf35956a4be08e801b750c26f33, type: 2} m_StaticBatchInfo: firstSubMesh: 0 subMeshCount: 0 @@ -1573,7 +1581,7 @@ MeshRenderer: m_ProbeAnchor: {fileID: 0} m_LightProbeVolumeOverride: {fileID: 0} m_ScaleInLightmap: 1 - m_PreserveUVs: 1 + m_PreserveUVs: 0 m_IgnoreNormalsForChartDetection: 0 m_ImportantGI: 0 m_StitchLightmapSeams: 0 @@ -1585,21 +1593,21 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!23 &23131443350435860 +--- !u!23 &23075609872029122 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1784936519446344} + m_GameObject: {fileID: 1179319070824364} m_Enabled: 1 - m_CastShadows: 0 - 
m_ReceiveShadows: 0 + m_CastShadows: 1 + m_ReceiveShadows: 1 m_DynamicOccludee: 1 m_MotionVectors: 1 m_LightProbeUsage: 1 m_ReflectionProbeUsage: 1 m_Materials: - - {fileID: 2100000, guid: 262d8cbc02b104990841408098431457, type: 2} + - {fileID: 2100000, guid: 04be259c590de46f69db4cbd1da877d5, type: 2} m_StaticBatchInfo: firstSubMesh: 0 subMeshCount: 0 @@ -1619,12 +1627,12 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!23 &23134278246637528 +--- !u!23 &23183609615186614 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1225831640572844} + m_GameObject: {fileID: 1358824403081850} m_Enabled: 1 m_CastShadows: 1 m_ReceiveShadows: 1 @@ -1633,7 +1641,7 @@ MeshRenderer: m_LightProbeUsage: 1 m_ReflectionProbeUsage: 1 m_Materials: - - {fileID: 2100000, guid: c9fa44c2c3f8ce74ca39a3355ea42631, type: 2} + - {fileID: 2100000, guid: f731be6866ce749fd8349e67ae81f76a, type: 2} m_StaticBatchInfo: firstSubMesh: 0 subMeshCount: 0 @@ -1653,12 +1661,12 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!23 &23135679569986010 +--- !u!23 &23187340099814854 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1354074425749060} + m_GameObject: {fileID: 1135489216937854} m_Enabled: 1 m_CastShadows: 1 m_ReceiveShadows: 1 @@ -1687,12 +1695,12 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!23 &23140602843340680 +--- !u!23 &23229943995234896 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1321332639796692} + m_GameObject: {fileID: 1420568803306334} m_Enabled: 1 m_CastShadows: 1 m_ReceiveShadows: 1 @@ -1721,21 +1729,21 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!23 &23152468287192832 +--- !u!23 &23249721641656990 
MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1901325889748826} + m_GameObject: {fileID: 1682998971059402} m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 + m_CastShadows: 0 + m_ReceiveShadows: 0 m_DynamicOccludee: 1 m_MotionVectors: 1 m_LightProbeUsage: 1 m_ReflectionProbeUsage: 1 m_Materials: - - {fileID: 2100000, guid: f731be6866ce749fd8349e67ae81f76a, type: 2} + - {fileID: 2100000, guid: 262d8cbc02b104990841408098431457, type: 2} m_StaticBatchInfo: firstSubMesh: 0 subMeshCount: 0 @@ -1755,21 +1763,21 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!23 &23174089715016186 +--- !u!23 &23285836828984498 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1872164461544240} + m_GameObject: {fileID: 1353209702154624} m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 + m_CastShadows: 0 + m_ReceiveShadows: 0 m_DynamicOccludee: 1 m_MotionVectors: 1 m_LightProbeUsage: 1 m_ReflectionProbeUsage: 1 m_Materials: - - {fileID: 2100000, guid: f731be6866ce749fd8349e67ae81f76a, type: 2} + - {fileID: 2100000, guid: 262d8cbc02b104990841408098431457, type: 2} m_StaticBatchInfo: firstSubMesh: 0 subMeshCount: 0 @@ -1789,12 +1797,12 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!23 &23199089093492790 +--- !u!23 &23328732370008150 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1429079117656474} + m_GameObject: {fileID: 1446822082845914} m_Enabled: 1 m_CastShadows: 0 m_ReceiveShadows: 0 @@ -1823,12 +1831,12 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!23 &23220910664895598 +--- !u!23 &23348621273683864 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 
1972265198832878} + m_GameObject: {fileID: 1681006282972324} m_Enabled: 1 m_CastShadows: 1 m_ReceiveShadows: 1 @@ -1857,12 +1865,12 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!23 &23339485608791352 +--- !u!23 &23381564833934054 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1295027271656972} + m_GameObject: {fileID: 1661059072361910} m_Enabled: 1 m_CastShadows: 1 m_ReceiveShadows: 1 @@ -1871,7 +1879,7 @@ MeshRenderer: m_LightProbeUsage: 1 m_ReflectionProbeUsage: 1 m_Materials: - - {fileID: 2100000, guid: f731be6866ce749fd8349e67ae81f76a, type: 2} + - {fileID: 2100000, guid: 04be259c590de46f69db4cbd1da877d5, type: 2} m_StaticBatchInfo: firstSubMesh: 0 subMeshCount: 0 @@ -1891,12 +1899,12 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!23 &23360178695717674 +--- !u!23 &23417508503027462 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1902315786566738} + m_GameObject: {fileID: 1014600383792522} m_Enabled: 1 m_CastShadows: 1 m_ReceiveShadows: 1 @@ -1905,7 +1913,7 @@ MeshRenderer: m_LightProbeUsage: 1 m_ReflectionProbeUsage: 1 m_Materials: - - {fileID: 2100000, guid: c9fa44c2c3f8ce74ca39a3355ea42631, type: 2} + - {fileID: 2100000, guid: f731be6866ce749fd8349e67ae81f76a, type: 2} m_StaticBatchInfo: firstSubMesh: 0 subMeshCount: 0 @@ -1925,21 +1933,21 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!23 &23371983477781584 +--- !u!23 &23431158324231992 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1606019544489478} + m_GameObject: {fileID: 1873124424911068} m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 + m_CastShadows: 0 + m_ReceiveShadows: 0 m_DynamicOccludee: 1 m_MotionVectors: 1 m_LightProbeUsage: 1 
m_ReflectionProbeUsage: 1 m_Materials: - - {fileID: 2100000, guid: c9fa44c2c3f8ce74ca39a3355ea42631, type: 2} + - {fileID: 2100000, guid: 262d8cbc02b104990841408098431457, type: 2} m_StaticBatchInfo: firstSubMesh: 0 subMeshCount: 0 @@ -1959,21 +1967,21 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!23 &23385913893104254 +--- !u!23 &23451884464532582 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1154613458410576} + m_GameObject: {fileID: 1214419816843726} m_Enabled: 1 - m_CastShadows: 0 - m_ReceiveShadows: 0 + m_CastShadows: 1 + m_ReceiveShadows: 1 m_DynamicOccludee: 1 m_MotionVectors: 1 m_LightProbeUsage: 1 m_ReflectionProbeUsage: 1 m_Materials: - - {fileID: 2100000, guid: 262d8cbc02b104990841408098431457, type: 2} + - {fileID: 2100000, guid: f731be6866ce749fd8349e67ae81f76a, type: 2} m_StaticBatchInfo: firstSubMesh: 0 subMeshCount: 0 @@ -1993,12 +2001,12 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!23 &23440411431868422 +--- !u!23 &23477035516712264 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1948317836081598} + m_GameObject: {fileID: 1725685797327414} m_Enabled: 1 m_CastShadows: 1 m_ReceiveShadows: 1 @@ -2007,7 +2015,7 @@ MeshRenderer: m_LightProbeUsage: 1 m_ReflectionProbeUsage: 1 m_Materials: - - {fileID: 2100000, guid: f731be6866ce749fd8349e67ae81f76a, type: 2} + - {fileID: 2100000, guid: c9fa44c2c3f8ce74ca39a3355ea42631, type: 2} m_StaticBatchInfo: firstSubMesh: 0 subMeshCount: 0 @@ -2027,12 +2035,12 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!23 &23446453883495642 +--- !u!23 &23493511592802438 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1644033386471554} + m_GameObject: {fileID: 1079907389665764} 
m_Enabled: 1 m_CastShadows: 1 m_ReceiveShadows: 1 @@ -2041,7 +2049,7 @@ MeshRenderer: m_LightProbeUsage: 1 m_ReflectionProbeUsage: 1 m_Materials: - - {fileID: 2100000, guid: c9fa44c2c3f8ce74ca39a3355ea42631, type: 2} + - {fileID: 2100000, guid: acba6bf2a290a496bb8989b42bf8698d, type: 2} m_StaticBatchInfo: firstSubMesh: 0 subMeshCount: 0 @@ -2049,7 +2057,7 @@ MeshRenderer: m_ProbeAnchor: {fileID: 0} m_LightProbeVolumeOverride: {fileID: 0} m_ScaleInLightmap: 1 - m_PreserveUVs: 1 + m_PreserveUVs: 0 m_IgnoreNormalsForChartDetection: 0 m_ImportantGI: 0 m_StitchLightmapSeams: 0 @@ -2061,12 +2069,12 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!23 &23448090049970378 +--- !u!23 &23553176880752320 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1832473509294508} + m_GameObject: {fileID: 1078242976093628} m_Enabled: 1 m_CastShadows: 1 m_ReceiveShadows: 1 @@ -2075,7 +2083,7 @@ MeshRenderer: m_LightProbeUsage: 1 m_ReflectionProbeUsage: 1 m_Materials: - - {fileID: 2100000, guid: 04be259c590de46f69db4cbd1da877d5, type: 2} + - {fileID: 2100000, guid: f731be6866ce749fd8349e67ae81f76a, type: 2} m_StaticBatchInfo: firstSubMesh: 0 subMeshCount: 0 @@ -2095,21 +2103,21 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!23 &23492609137453340 +--- !u!23 &23593994518387712 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1006947046326398} + m_GameObject: {fileID: 1122318382191954} m_Enabled: 1 - m_CastShadows: 0 - m_ReceiveShadows: 0 + m_CastShadows: 1 + m_ReceiveShadows: 1 m_DynamicOccludee: 1 m_MotionVectors: 1 m_LightProbeUsage: 1 m_ReflectionProbeUsage: 1 m_Materials: - - {fileID: 2100000, guid: 262d8cbc02b104990841408098431457, type: 2} + - {fileID: 2100000, guid: 04be259c590de46f69db4cbd1da877d5, type: 2} m_StaticBatchInfo: firstSubMesh: 0 subMeshCount: 0 
@@ -2129,21 +2137,21 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!23 &23503300393546632 +--- !u!23 &23615217600355662 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1736278560078538} + m_GameObject: {fileID: 1907470464862992} m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 + m_CastShadows: 0 + m_ReceiveShadows: 0 m_DynamicOccludee: 1 m_MotionVectors: 1 m_LightProbeUsage: 1 m_ReflectionProbeUsage: 1 m_Materials: - - {fileID: 2100000, guid: 04be259c590de46f69db4cbd1da877d5, type: 2} + - {fileID: 2100000, guid: 262d8cbc02b104990841408098431457, type: 2} m_StaticBatchInfo: firstSubMesh: 0 subMeshCount: 0 @@ -2163,12 +2171,12 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!23 &23610093730846242 +--- !u!23 &23618215726505526 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1113464549287688} + m_GameObject: {fileID: 1363680577697298} m_Enabled: 1 m_CastShadows: 1 m_ReceiveShadows: 1 @@ -2197,12 +2205,12 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!23 &23640165886292070 +--- !u!23 &23710656781028902 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1916152952954816} + m_GameObject: {fileID: 1558211054879664} m_Enabled: 1 m_CastShadows: 1 m_ReceiveShadows: 1 @@ -2231,12 +2239,12 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!23 &23650098059578888 +--- !u!23 &23746622875505272 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1488027870493332} + m_GameObject: {fileID: 1715767492872126} m_Enabled: 1 m_CastShadows: 1 m_ReceiveShadows: 1 @@ -2245,7 +2253,7 @@ MeshRenderer: m_LightProbeUsage: 1 
m_ReflectionProbeUsage: 1 m_Materials: - - {fileID: 2100000, guid: acba6bf2a290a496bb8989b42bf8698d, type: 2} + - {fileID: 2100000, guid: c9fa44c2c3f8ce74ca39a3355ea42631, type: 2} m_StaticBatchInfo: firstSubMesh: 0 subMeshCount: 0 @@ -2253,7 +2261,7 @@ MeshRenderer: m_ProbeAnchor: {fileID: 0} m_LightProbeVolumeOverride: {fileID: 0} m_ScaleInLightmap: 1 - m_PreserveUVs: 0 + m_PreserveUVs: 1 m_IgnoreNormalsForChartDetection: 0 m_ImportantGI: 0 m_StitchLightmapSeams: 0 @@ -2265,21 +2273,21 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!23 &23670615737810094 +--- !u!23 &23755052841482864 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1637229412367504} + m_GameObject: {fileID: 1930612303318000} m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 + m_CastShadows: 0 + m_ReceiveShadows: 0 m_DynamicOccludee: 1 m_MotionVectors: 1 m_LightProbeUsage: 1 m_ReflectionProbeUsage: 1 m_Materials: - - {fileID: 2100000, guid: f731be6866ce749fd8349e67ae81f76a, type: 2} + - {fileID: 2100000, guid: 262d8cbc02b104990841408098431457, type: 2} m_StaticBatchInfo: firstSubMesh: 0 subMeshCount: 0 @@ -2299,21 +2307,21 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!23 &23720538892684934 +--- !u!23 &23763015239766136 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1311950243820864} + m_GameObject: {fileID: 1669654006840182} m_Enabled: 1 - m_CastShadows: 0 - m_ReceiveShadows: 0 + m_CastShadows: 1 + m_ReceiveShadows: 1 m_DynamicOccludee: 1 m_MotionVectors: 1 m_LightProbeUsage: 1 m_ReflectionProbeUsage: 1 m_Materials: - - {fileID: 2100000, guid: 262d8cbc02b104990841408098431457, type: 2} + - {fileID: 2100000, guid: f731be6866ce749fd8349e67ae81f76a, type: 2} m_StaticBatchInfo: firstSubMesh: 0 subMeshCount: 0 @@ -2333,12 +2341,12 @@ MeshRenderer: m_SortingLayerID: 0 
m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!23 &23784087430264244 +--- !u!23 &23805066794631192 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1938841407205532} + m_GameObject: {fileID: 1193448108562946} m_Enabled: 1 m_CastShadows: 0 m_ReceiveShadows: 0 @@ -2367,21 +2375,21 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!23 &23853077636474208 +--- !u!23 &23842657651682012 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1509336059309658} + m_GameObject: {fileID: 1445621402098048} m_Enabled: 1 - m_CastShadows: 0 - m_ReceiveShadows: 0 + m_CastShadows: 1 + m_ReceiveShadows: 1 m_DynamicOccludee: 1 m_MotionVectors: 1 m_LightProbeUsage: 1 m_ReflectionProbeUsage: 1 m_Materials: - - {fileID: 2100000, guid: 262d8cbc02b104990841408098431457, type: 2} + - {fileID: 2100000, guid: c9fa44c2c3f8ce74ca39a3355ea42631, type: 2} m_StaticBatchInfo: firstSubMesh: 0 subMeshCount: 0 @@ -2401,12 +2409,12 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!23 &23859266924290150 +--- !u!23 &23861339133111532 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1360490281124436} + m_GameObject: {fileID: 1399553220224106} m_Enabled: 1 m_CastShadows: 1 m_ReceiveShadows: 1 @@ -2435,12 +2443,12 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!23 &23897348706436050 +--- !u!23 &23937488998945520 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1922169902050698} + m_GameObject: {fileID: 1841305616332738} m_Enabled: 1 m_CastShadows: 1 m_ReceiveShadows: 1 @@ -2449,7 +2457,7 @@ MeshRenderer: m_LightProbeUsage: 1 m_ReflectionProbeUsage: 1 m_Materials: - - {fileID: 2100000, guid: 
66163cf35956a4be08e801b750c26f33, type: 2} + - {fileID: 2100000, guid: c9fa44c2c3f8ce74ca39a3355ea42631, type: 2} m_StaticBatchInfo: firstSubMesh: 0 subMeshCount: 0 @@ -2457,7 +2465,7 @@ MeshRenderer: m_ProbeAnchor: {fileID: 0} m_LightProbeVolumeOverride: {fileID: 0} m_ScaleInLightmap: 1 - m_PreserveUVs: 0 + m_PreserveUVs: 1 m_IgnoreNormalsForChartDetection: 0 m_ImportantGI: 0 m_StitchLightmapSeams: 0 @@ -2469,21 +2477,21 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!23 &23936720454023906 +--- !u!23 &23940763941071586 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1449550305145294} + m_GameObject: {fileID: 1121462989228714} m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 + m_CastShadows: 0 + m_ReceiveShadows: 0 m_DynamicOccludee: 1 m_MotionVectors: 1 m_LightProbeUsage: 1 m_ReflectionProbeUsage: 1 m_Materials: - - {fileID: 2100000, guid: 04be259c590de46f69db4cbd1da877d5, type: 2} + - {fileID: 2100000, guid: 262d8cbc02b104990841408098431457, type: 2} m_StaticBatchInfo: firstSubMesh: 0 subMeshCount: 0 @@ -2503,21 +2511,21 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!23 &23953636389523694 +--- !u!23 &23949188537607370 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1612432655468506} + m_GameObject: {fileID: 1225293932999958} m_Enabled: 1 - m_CastShadows: 0 - m_ReceiveShadows: 0 + m_CastShadows: 1 + m_ReceiveShadows: 1 m_DynamicOccludee: 1 m_MotionVectors: 1 m_LightProbeUsage: 1 m_ReflectionProbeUsage: 1 m_Materials: - - {fileID: 2100000, guid: 262d8cbc02b104990841408098431457, type: 2} + - {fileID: 2100000, guid: 04be259c590de46f69db4cbd1da877d5, type: 2} m_StaticBatchInfo: firstSubMesh: 0 subMeshCount: 0 @@ -2537,222 +2545,222 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!33 
&33021456786405022 +--- !u!33 &33007201079611444 MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1006947046326398} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33066230622789280 + m_GameObject: {fileID: 1135489216937854} + m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} +--- !u!33 &33007244417731778 MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1360490281124436} + m_GameObject: {fileID: 1669654006840182} m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33141703365320568 +--- !u!33 &33064880745139274 MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1311950243820864} + m_GameObject: {fileID: 1122318382191954} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33230664802658248 +--- !u!33 &33104902330510722 MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1225831640572844} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33249232817837298 + m_GameObject: {fileID: 1399553220224106} + m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} +--- !u!33 &33112832585894242 MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1509336059309658} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33311378397245648 + m_GameObject: {fileID: 1270392021614266} + m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} +--- !u!33 &33173463307667788 MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 
100100000} - m_GameObject: {fileID: 1449550305145294} + m_GameObject: {fileID: 1225293932999958} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33331636073791150 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1488027870493332} - m_Mesh: {fileID: 4300002, guid: a9d8f499f5b9848438d280dc28b3b52e, type: 3} ---- !u!33 &33355207304875418 +--- !u!33 &33220378165577016 MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1901325889748826} + m_GameObject: {fileID: 1358824403081850} m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33375700058842592 +--- !u!33 &33251900124875064 MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1832473509294508} + m_GameObject: {fileID: 1661059072361910} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33407893211590960 +--- !u!33 &33291922297337334 MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1902315786566738} + m_GameObject: {fileID: 1907470464862992} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33443085460647932 +--- !u!33 &33311110033190690 MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1354074425749060} + m_GameObject: {fileID: 1078242976093628} m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33555959561751530 +--- !u!33 &33345590301219576 MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1295027271656972} - m_Mesh: {fileID: 10210, guid: 
0000000000000000e000000000000000, type: 0} ---- !u!33 &33592911429918398 + m_GameObject: {fileID: 1930612303318000} + m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!33 &33398962914955792 MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1113464549287688} + m_GameObject: {fileID: 1014600383792522} m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33597693146101122 +--- !u!33 &33433944005396330 MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1872164461544240} - m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33602396174560854 + m_GameObject: {fileID: 1353209702154624} + m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!33 &33437662427228988 MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1405292418219056} + m_GameObject: {fileID: 1682998971059402} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33608906352258716 +--- !u!33 &33437997913177172 +MeshFilter: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1363680577697298} + m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} +--- !u!33 &33441042292940994 MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1429079117656474} + m_GameObject: {fileID: 1725685797327414} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33628516318414646 +--- !u!33 &33487151372684152 MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 
1948317836081598} - m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33715293034083534 + m_GameObject: {fileID: 1193448108562946} + m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!33 &33497420207122002 MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1321332639796692} + m_GameObject: {fileID: 1558211054879664} m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33728199568902842 +--- !u!33 &33608516916290782 MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1922169902050698} - m_Mesh: {fileID: 4300000, guid: a9d8f499f5b9848438d280dc28b3b52e, type: 3} ---- !u!33 &33737671320950324 + m_GameObject: {fileID: 1214419816843726} + m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} +--- !u!33 &33618847153425232 MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1606019544489478} + m_GameObject: {fileID: 1445621402098048} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33740040246150820 +--- !u!33 &33639571011425304 MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1154613458410576} + m_GameObject: {fileID: 1121462989228714} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33777054715953836 +--- !u!33 &33656934514913342 MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1938841407205532} + m_GameObject: {fileID: 1179319070824364} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33822080881909152 +--- !u!33 &33657416383297052 MeshFilter: 
m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1612432655468506} + m_GameObject: {fileID: 1873124424911068} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33851667657955130 +--- !u!33 &33706362889224244 MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1637229412367504} + m_GameObject: {fileID: 1681006282972324} m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33891928207286534 +--- !u!33 &33795060429981700 MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1736278560078538} + m_GameObject: {fileID: 1715767492872126} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33923970477956536 +--- !u!33 &33834256339971282 MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1916152952954816} - m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33930320050166542 + m_GameObject: {fileID: 1079907389665764} + m_Mesh: {fileID: 4300002, guid: a9d8f499f5b9848438d280dc28b3b52e, type: 3} +--- !u!33 &33845612629305896 MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1972265198832878} - m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33937136877863552 + m_GameObject: {fileID: 1446822082845914} + m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!33 &33886886862843664 MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1644033386471554} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 
&33938174309896050 + m_GameObject: {fileID: 1971119195936814} + m_Mesh: {fileID: 4300000, guid: a9d8f499f5b9848438d280dc28b3b52e, type: 3} +--- !u!33 &33985812240333934 MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1784936519446344} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33942393414274458 + m_GameObject: {fileID: 1420568803306334} + m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} +--- !u!33 &33994336694509784 MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1395664386782538} - m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} ---- !u!54 &54088750604404324 + m_GameObject: {fileID: 1841305616332738} + m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!54 &54017759129031336 Rigidbody: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1443673828307814} + m_GameObject: {fileID: 1939112378710628} serializedVersion: 2 m_Mass: 10 m_Drag: 0.5 @@ -2762,12 +2770,12 @@ Rigidbody: m_Interpolate: 0 m_Constraints: 112 m_CollisionDetection: 0 ---- !u!54 &54338553804475180 +--- !u!54 &54260679202702464 Rigidbody: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1169383371125934} + m_GameObject: {fileID: 1373801553976666} serializedVersion: 2 m_Mass: 10 m_Drag: 0.5 @@ -2777,12 +2785,12 @@ Rigidbody: m_Interpolate: 0 m_Constraints: 112 m_CollisionDetection: 0 ---- !u!54 &54626777054134360 +--- !u!54 &54289954206234004 Rigidbody: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1964761937961574} + m_GameObject: {fileID: 1165679820726490} serializedVersion: 2 m_Mass: 10 m_Drag: 0.5 @@ -2792,12 
+2800,12 @@ Rigidbody: m_Interpolate: 0 m_Constraints: 112 m_CollisionDetection: 0 ---- !u!54 &54753735395303746 +--- !u!54 &54578239364845406 Rigidbody: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1661627863960872} + m_GameObject: {fileID: 1317136368302180} serializedVersion: 2 m_Mass: 10 m_Drag: 0.5 @@ -2807,12 +2815,12 @@ Rigidbody: m_Interpolate: 0 m_Constraints: 112 m_CollisionDetection: 0 ---- !u!64 &64906907870115852 +--- !u!64 &64888510637481462 MeshCollider: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1922169902050698} + m_GameObject: {fileID: 1971119195936814} m_Material: {fileID: 0} m_IsTrigger: 0 m_Enabled: 1 @@ -2821,296 +2829,428 @@ MeshCollider: m_CookingOptions: 14 m_SkinWidth: 0.01 m_Mesh: {fileID: 4300000, guid: a9d8f499f5b9848438d280dc28b3b52e, type: 3} ---- !u!65 &65059201626075844 +--- !u!65 &65015492267237124 BoxCollider: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1488027870493332} + m_GameObject: {fileID: 1930612303318000} m_Material: {fileID: 0} m_IsTrigger: 0 - m_Enabled: 1 + m_Enabled: 0 serializedVersion: 2 - m_Size: {x: 10000, y: 2.220446e-12, z: 10000} - m_Center: {x: 0, y: -3.111633e-13, z: 0} ---- !u!65 &65110456292660578 + m_Size: {x: 1, y: 1, z: 1} + m_Center: {x: 0, y: 0, z: 0} +--- !u!65 &65057825353511568 BoxCollider: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1429079117656474} + m_GameObject: {fileID: 1907470464862992} m_Material: {fileID: 0} m_IsTrigger: 0 m_Enabled: 0 serializedVersion: 2 m_Size: {x: 1, y: 1, z: 1} m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65178766753981896 +--- !u!65 &65357436224164412 BoxCollider: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 
1443673828307814} + m_GameObject: {fileID: 1446822082845914} m_Material: {fileID: 0} m_IsTrigger: 0 - m_Enabled: 1 + m_Enabled: 0 serializedVersion: 2 m_Size: {x: 1, y: 1, z: 1} m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65220200117116584 +--- !u!65 &65589377080510240 BoxCollider: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1311950243820864} + m_GameObject: {fileID: 1079907389665764} m_Material: {fileID: 0} m_IsTrigger: 0 - m_Enabled: 0 + m_Enabled: 1 serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65285650247585738 + m_Size: {x: 10000, y: 2.220446e-12, z: 10000} + m_Center: {x: 0, y: -3.111633e-13, z: 0} +--- !u!65 &65589463239440526 BoxCollider: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1938841407205532} + m_GameObject: {fileID: 1165679820726490} m_Material: {fileID: 0} m_IsTrigger: 0 - m_Enabled: 0 + m_Enabled: 1 serializedVersion: 2 m_Size: {x: 1, y: 1, z: 1} m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65393336763004480 +--- !u!65 &65682238052370530 BoxCollider: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1154613458410576} + m_GameObject: {fileID: 1193448108562946} m_Material: {fileID: 0} m_IsTrigger: 0 m_Enabled: 0 serializedVersion: 2 m_Size: {x: 1, y: 1, z: 1} m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65441205843344002 +--- !u!65 &65722116964617048 BoxCollider: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1661627863960872} + m_GameObject: {fileID: 1353209702154624} m_Material: {fileID: 0} m_IsTrigger: 0 - m_Enabled: 1 + m_Enabled: 0 serializedVersion: 2 m_Size: {x: 1, y: 1, z: 1} m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65533246245578526 +--- !u!65 &65738389332670280 BoxCollider: m_ObjectHideFlags: 1 m_PrefabParentObject: 
{fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1006947046326398} + m_GameObject: {fileID: 1873124424911068} m_Material: {fileID: 0} m_IsTrigger: 0 m_Enabled: 0 serializedVersion: 2 m_Size: {x: 1, y: 1, z: 1} m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65540878552782854 +--- !u!65 &65739180902805812 BoxCollider: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1964761937961574} + m_GameObject: {fileID: 1317136368302180} m_Material: {fileID: 0} m_IsTrigger: 0 m_Enabled: 1 serializedVersion: 2 m_Size: {x: 1, y: 1, z: 1} m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65588617864146588 +--- !u!65 &65814032642346288 BoxCollider: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1509336059309658} + m_GameObject: {fileID: 1939112378710628} m_Material: {fileID: 0} m_IsTrigger: 0 - m_Enabled: 0 + m_Enabled: 1 serializedVersion: 2 m_Size: {x: 1, y: 1, z: 1} m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65744343711438542 +--- !u!65 &65913236404058334 BoxCollider: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1784936519446344} + m_GameObject: {fileID: 1373801553976666} m_Material: {fileID: 0} m_IsTrigger: 0 - m_Enabled: 0 + m_Enabled: 1 serializedVersion: 2 m_Size: {x: 1, y: 1, z: 1} m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65901695084016898 +--- !u!65 &65918024835619606 BoxCollider: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1612432655468506} + m_GameObject: {fileID: 1121462989228714} m_Material: {fileID: 0} m_IsTrigger: 0 m_Enabled: 0 serializedVersion: 2 m_Size: {x: 1, y: 1, z: 1} m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65932430430952094 +--- !u!65 &65949213028989648 BoxCollider: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - 
m_GameObject: {fileID: 1169383371125934} + m_GameObject: {fileID: 1682998971059402} m_Material: {fileID: 0} m_IsTrigger: 0 - m_Enabled: 1 + m_Enabled: 0 serializedVersion: 2 m_Size: {x: 1, y: 1, z: 1} m_Center: {x: 0, y: 0, z: 0} ---- !u!114 &114080992426104682 +--- !u!114 &114036270357198286 +MonoBehaviour: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1373801553976666} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 282f342c2ab144bf38be65d4d0c4e07d, type: 3} + m_Name: + m_EditorClassIdentifier: + camera: {fileID: 20696931947702132} + sensorName: CameraSensor + width: 84 + height: 84 + grayscale: 0 +--- !u!114 &114322691115031348 +MonoBehaviour: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1939112378710628} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 282f342c2ab144bf38be65d4d0c4e07d, type: 3} + m_Name: + m_EditorClassIdentifier: + camera: {fileID: 20363738094913048} + sensorName: CameraSensor + width: 84 + height: 84 + grayscale: 0 +--- !u!114 &114326390494230518 +MonoBehaviour: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1165679820726490} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 282f342c2ab144bf38be65d4d0c4e07d, type: 3} + m_Name: + m_EditorClassIdentifier: + camera: {fileID: 20380145723616022} + sensorName: CameraSensor + width: 84 + height: 84 + grayscale: 0 +--- !u!114 &114380897261200276 MonoBehaviour: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1169383371125934} + m_GameObject: {fileID: 1165679820726490} m_Enabled: 1 m_EditorHideFlags: 0 m_Script: {fileID: 11500000, guid: c66e6845309d241c78a6d77ee2567928, type: 3} m_Name: m_EditorClassIdentifier: - brain: 
{fileID: 11400000, guid: 24e823594179d48189b2c78003c50ce0, type: 2} agentParameters: - agentCameras: - - {fileID: 20825339371991526} - agentRenderTextures: [] maxStep: 5000 resetOnDone: 1 onDemandDecision: 0 numberOfActionsBetweenDecisions: 5 - area: {fileID: 1373903370712748} + area: {fileID: 1145096862361766} turnSpeed: 300 moveSpeed: 2 normalMaterial: {fileID: 2100000, guid: c9fa44c2c3f8ce74ca39a3355ea42631, type: 2} badMaterial: {fileID: 2100000, guid: 88b9ae7af2c1748a0a1f63407587a601, type: 2} goodMaterial: {fileID: 2100000, guid: c67450f290f3e4897bc40276a619e78d, type: 2} frozenMaterial: {fileID: 2100000, guid: 66163cf35956a4be08e801b750c26f33, type: 2} - myLaser: {fileID: 1071481650937550} + myLaser: {fileID: 1900094563283840} contribute: 0 useVectorObs: 0 ---- !u!114 &114257846618384398 +--- !u!114 &114422602055470240 +MonoBehaviour: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1145096862361766} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: e14a1d4accf634e3fb9b5bc94d023393, type: 3} + m_Name: + m_EditorClassIdentifier: + food: {fileID: 1699568242032636, guid: b59e4a7fd76af471cadd16e90133a084, type: 2} + badFood: {fileID: 1853821981215314, guid: 350871b865967466daa1f596193cc22e, type: 2} + numFood: 25 + numBadFood: 25 + respawnFood: 1 + range: 45 +--- !u!114 &114429222608880102 +MonoBehaviour: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1317136368302180} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 282f342c2ab144bf38be65d4d0c4e07d, type: 3} + m_Name: + m_EditorClassIdentifier: + camera: {fileID: 20863703825242712} + sensorName: CameraSensor + width: 84 + height: 84 + grayscale: 0 +--- !u!114 &114484596947519388 MonoBehaviour: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 
1443673828307814} + m_GameObject: {fileID: 1373801553976666} m_Enabled: 1 m_EditorHideFlags: 0 m_Script: {fileID: 11500000, guid: c66e6845309d241c78a6d77ee2567928, type: 3} m_Name: m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: 24e823594179d48189b2c78003c50ce0, type: 2} agentParameters: - agentCameras: - - {fileID: 20890573870796418} - agentRenderTextures: [] maxStep: 5000 resetOnDone: 1 onDemandDecision: 0 numberOfActionsBetweenDecisions: 5 - area: {fileID: 1373903370712748} + area: {fileID: 1145096862361766} turnSpeed: 300 moveSpeed: 2 normalMaterial: {fileID: 2100000, guid: c9fa44c2c3f8ce74ca39a3355ea42631, type: 2} badMaterial: {fileID: 2100000, guid: 88b9ae7af2c1748a0a1f63407587a601, type: 2} goodMaterial: {fileID: 2100000, guid: c67450f290f3e4897bc40276a619e78d, type: 2} frozenMaterial: {fileID: 2100000, guid: 66163cf35956a4be08e801b750c26f33, type: 2} - myLaser: {fileID: 1580603984718180} + myLaser: {fileID: 1898252046043334} contribute: 0 useVectorObs: 0 ---- !u!114 &114552799170750468 +--- !u!114 &114556471765155272 MonoBehaviour: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1373903370712748} + m_GameObject: {fileID: 1373801553976666} m_Enabled: 1 m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: e14a1d4accf634e3fb9b5bc94d023393, type: 3} + m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} m_Name: m_EditorClassIdentifier: - food: {fileID: 1699568242032636, guid: b59e4a7fd76af471cadd16e90133a084, type: 2} - badFood: {fileID: 1853821981215314, guid: 350871b865967466daa1f596193cc22e, type: 2} - numFood: 25 - numBadFood: 25 - respawnFood: 1 - range: 45 ---- !u!114 &114625025344622832 + m_BrainParameters: + vectorObservationSize: 0 + numStackedVectorObservations: 1 + vectorActionSize: 03000000030000000300000002000000 + vectorActionDescriptions: [] + vectorActionSpaceType: 0 + m_Model: {fileID: 0} + m_InferenceDevice: 0 + m_UseHeuristic: 0 + 
m_BehaviorName: VisualFoodCollector +--- !u!114 &114724832030926412 +MonoBehaviour: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1165679820726490} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} + m_Name: + m_EditorClassIdentifier: + m_BrainParameters: + vectorObservationSize: 0 + numStackedVectorObservations: 1 + vectorActionSize: 03000000030000000300000002000000 + vectorActionDescriptions: [] + vectorActionSpaceType: 0 + m_Model: {fileID: 0} + m_InferenceDevice: 0 + m_UseHeuristic: 0 + m_BehaviorName: VisualFoodCollector +--- !u!114 &114729119221978826 MonoBehaviour: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1661627863960872} + m_GameObject: {fileID: 1939112378710628} m_Enabled: 1 m_EditorHideFlags: 0 m_Script: {fileID: 11500000, guid: c66e6845309d241c78a6d77ee2567928, type: 3} m_Name: m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: 24e823594179d48189b2c78003c50ce0, type: 2} agentParameters: - agentCameras: - - {fileID: 20184829288622090} - agentRenderTextures: [] maxStep: 5000 resetOnDone: 1 onDemandDecision: 0 numberOfActionsBetweenDecisions: 5 - area: {fileID: 1373903370712748} + area: {fileID: 1145096862361766} turnSpeed: 300 moveSpeed: 2 normalMaterial: {fileID: 2100000, guid: c9fa44c2c3f8ce74ca39a3355ea42631, type: 2} badMaterial: {fileID: 2100000, guid: 88b9ae7af2c1748a0a1f63407587a601, type: 2} goodMaterial: {fileID: 2100000, guid: c67450f290f3e4897bc40276a619e78d, type: 2} frozenMaterial: {fileID: 2100000, guid: 66163cf35956a4be08e801b750c26f33, type: 2} - myLaser: {fileID: 1335947341879056} + myLaser: {fileID: 1779831409734062} contribute: 0 useVectorObs: 0 ---- !u!114 &114855044884614820 +--- !u!114 &114799611184247278 MonoBehaviour: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 
100100000} - m_GameObject: {fileID: 1964761937961574} + m_GameObject: {fileID: 1939112378710628} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} + m_Name: + m_EditorClassIdentifier: + m_BrainParameters: + vectorObservationSize: 0 + numStackedVectorObservations: 1 + vectorActionSize: 03000000030000000300000002000000 + vectorActionDescriptions: [] + vectorActionSpaceType: 0 + m_Model: {fileID: 0} + m_InferenceDevice: 0 + m_UseHeuristic: 0 + m_BehaviorName: VisualFoodCollector +--- !u!114 &114869844339180154 +MonoBehaviour: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1317136368302180} m_Enabled: 1 m_EditorHideFlags: 0 m_Script: {fileID: 11500000, guid: c66e6845309d241c78a6d77ee2567928, type: 3} m_Name: m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: 24e823594179d48189b2c78003c50ce0, type: 2} agentParameters: - agentCameras: - - {fileID: 20416796192002010} - agentRenderTextures: [] maxStep: 5000 resetOnDone: 1 onDemandDecision: 0 numberOfActionsBetweenDecisions: 5 - area: {fileID: 1373903370712748} + area: {fileID: 1145096862361766} turnSpeed: 300 moveSpeed: 2 normalMaterial: {fileID: 2100000, guid: c9fa44c2c3f8ce74ca39a3355ea42631, type: 2} badMaterial: {fileID: 2100000, guid: 88b9ae7af2c1748a0a1f63407587a601, type: 2} goodMaterial: {fileID: 2100000, guid: c67450f290f3e4897bc40276a619e78d, type: 2} frozenMaterial: {fileID: 2100000, guid: 66163cf35956a4be08e801b750c26f33, type: 2} - myLaser: {fileID: 1645871539393100} + myLaser: {fileID: 1307818939507544} contribute: 0 useVectorObs: 0 +--- !u!114 &114942033465821694 +MonoBehaviour: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1317136368302180} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} + m_Name: + 
m_EditorClassIdentifier: + m_BrainParameters: + vectorObservationSize: 0 + numStackedVectorObservations: 1 + vectorActionSize: 03000000030000000300000002000000 + vectorActionDescriptions: [] + vectorActionSpaceType: 0 + m_Model: {fileID: 0} + m_InferenceDevice: 0 + m_UseHeuristic: 0 + m_BehaviorName: VisualFoodCollector diff --git a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Scenes/FoodCollector.unity b/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Scenes/FoodCollector.unity index 62ed06ba1e..db8535a5e6 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Scenes/FoodCollector.unity +++ b/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Scenes/FoodCollector.unity @@ -747,11 +747,6 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 4fe57113e76a5426297487dd6faadc5b, type: 3} m_Name: m_EditorClassIdentifier: - broadcastHub: - broadcastingBrains: - - {fileID: 11400000, guid: 9e7865ec29c894c2d8c1617b0fa392f9, type: 2} - m_BrainsToControl: [] - m_MaxSteps: 1500 m_TrainingConfiguration: width: 500 height: 500 diff --git a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Scenes/FoodCollectorIL.unity b/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Scenes/FoodCollectorIL.unity deleted file mode 100644 index 44a02854f3..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Scenes/FoodCollectorIL.unity +++ /dev/null @@ -1,880 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!29 &1 -OcclusionCullingSettings: - m_ObjectHideFlags: 0 - serializedVersion: 2 - m_OcclusionBakeSettings: - smallestOccluder: 5 - smallestHole: 0.25 - backfaceThreshold: 100 - m_SceneGUID: 00000000000000000000000000000000 - m_OcclusionCullingData: {fileID: 0} ---- !u!104 &2 -RenderSettings: - m_ObjectHideFlags: 0 - serializedVersion: 8 - m_Fog: 0 - m_FogColor: {r: 0.5, g: 0.5, b: 0.5, a: 1} - m_FogMode: 3 - m_FogDensity: 0.01 - m_LinearFogStart: 0 - m_LinearFogEnd: 300 - m_AmbientSkyColor: {r: 0.8, g: 0.8, b: 0.8, a: 1} - m_AmbientEquatorColor: {r: 0.6965513, g: 0, b: 1, a: 1} - m_AmbientGroundColor: {r: 1, g: 0.45977026, b: 0, a: 1} - m_AmbientIntensity: 1 - m_AmbientMode: 3 - m_SubtractiveShadowColor: {r: 0.42, g: 0.478, b: 0.627, a: 1} - m_SkyboxMaterial: {fileID: 0} - m_HaloStrength: 0.5 - m_FlareStrength: 1 - m_FlareFadeSpeed: 3 - m_HaloTexture: {fileID: 0} - m_SpotCookie: {fileID: 10001, guid: 0000000000000000e000000000000000, type: 0} - m_DefaultReflectionMode: 0 - m_DefaultReflectionResolution: 128 - m_ReflectionBounces: 1 - m_ReflectionIntensity: 1 - m_CustomReflection: {fileID: 0} - m_Sun: {fileID: 0} - m_IndirectSpecularColor: {r: 0, g: 0, b: 0, a: 1} ---- !u!157 &3 -LightmapSettings: - m_ObjectHideFlags: 0 - serializedVersion: 11 - m_GIWorkflowMode: 1 - m_GISettings: - serializedVersion: 2 - m_BounceScale: 1 - m_IndirectOutputScale: 1 - m_AlbedoBoost: 1 - m_TemporalCoherenceThreshold: 1 - m_EnvironmentLightingMode: 0 - m_EnableBakedLightmaps: 1 - m_EnableRealtimeLightmaps: 1 - m_LightmapEditorSettings: - serializedVersion: 9 - m_Resolution: 2 - m_BakeResolution: 40 - m_TextureWidth: 1024 - m_TextureHeight: 1024 - m_AO: 0 - m_AOMaxDistance: 1 - m_CompAOExponent: 1 - m_CompAOExponentDirect: 0 - m_Padding: 2 - m_LightmapParameters: {fileID: 0} - m_LightmapsBakeMode: 1 - m_TextureCompression: 1 - m_FinalGather: 0 - m_FinalGatherFiltering: 1 - m_FinalGatherRayCount: 256 - m_ReflectionCompression: 2 - m_MixedBakeMode: 2 - 
m_BakeBackend: 0 - m_PVRSampling: 1 - m_PVRDirectSampleCount: 32 - m_PVRSampleCount: 500 - m_PVRBounces: 2 - m_PVRFilterTypeDirect: 0 - m_PVRFilterTypeIndirect: 0 - m_PVRFilterTypeAO: 0 - m_PVRFilteringMode: 1 - m_PVRCulling: 1 - m_PVRFilteringGaussRadiusDirect: 1 - m_PVRFilteringGaussRadiusIndirect: 5 - m_PVRFilteringGaussRadiusAO: 2 - m_PVRFilteringAtrousPositionSigmaDirect: 0.5 - m_PVRFilteringAtrousPositionSigmaIndirect: 2 - m_PVRFilteringAtrousPositionSigmaAO: 1 - m_ShowResolutionOverlay: 1 - m_LightingDataAsset: {fileID: 112000002, guid: 03723c7f910c3423aa1974f1b9ce8392, - type: 2} - m_UseShadowmask: 1 ---- !u!196 &4 -NavMeshSettings: - serializedVersion: 2 - m_ObjectHideFlags: 0 - m_BuildSettings: - serializedVersion: 2 - agentTypeID: 0 - agentRadius: 0.5 - agentHeight: 2 - agentSlope: 45 - agentClimb: 0.4 - ledgeDropHeight: 0 - maxJumpAcrossDistance: 0 - minRegionArea: 2 - manualCellSize: 0 - cellSize: 0.16666667 - manualTileSize: 0 - tileSize: 256 - accuratePlacement: 0 - debug: - m_Flags: 0 - m_NavMeshData: {fileID: 0} ---- !u!1 &192430538 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 192430542} - - component: {fileID: 192430541} - - component: {fileID: 192430540} - m_Layer: 0 - m_Name: Top-Down - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!124 &192430540 -Behaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 192430538} - m_Enabled: 1 ---- !u!20 &192430541 -Camera: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 192430538} - m_Enabled: 1 - serializedVersion: 2 - m_ClearFlags: 3 - m_BackGroundColor: {r: 0.19215687, g: 0.3019608, b: 0.4745098, a: 0} - m_NormalizedViewPortRect: - serializedVersion: 2 - x: 0.62 - y: 0.24 - width: 0.6 - 
height: 0.6 - near clip plane: 0.3 - far clip plane: 1000 - field of view: 60 - orthographic: 1 - orthographic size: 26 - m_Depth: 3 - m_CullingMask: - serializedVersion: 2 - m_Bits: 4294967295 - m_RenderingPath: -1 - m_TargetTexture: {fileID: 0} - m_TargetDisplay: 0 - m_TargetEye: 3 - m_HDR: 1 - m_AllowMSAA: 1 - m_AllowDynamicResolution: 0 - m_ForceIntoRT: 0 - m_OcclusionCulling: 1 - m_StereoConvergence: 10 - m_StereoSeparation: 0.022 ---- !u!4 &192430542 -Transform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 192430538} - m_LocalRotation: {x: 0.7071068, y: 0, z: 0, w: 0.7071068} - m_LocalPosition: {x: 0, y: 90, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 0} - m_RootOrder: 3 - m_LocalEulerAnglesHint: {x: 90, y: 0, z: 0} ---- !u!1 &273651478 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 273651479} - - component: {fileID: 273651481} - - component: {fileID: 273651480} - m_Layer: 5 - m_Name: Text - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!224 &273651479 -RectTransform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 273651478} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 1799584681} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} - m_AnchorMin: {x: 0, y: 0} - m_AnchorMax: {x: 1, y: 1} - m_AnchoredPosition: {x: 0, y: 0} - m_SizeDelta: {x: 0, y: 0} - m_Pivot: {x: 0.5, y: 0.5} ---- !u!114 &273651480 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 273651478} - m_Enabled: 1 - m_EditorHideFlags: 0 - 
m_Script: {fileID: 708705254, guid: f70555f144d8491a825f0804e09c671c, type: 3} - m_Name: - m_EditorClassIdentifier: - m_Material: {fileID: 0} - m_Color: {r: 0.9338235, g: 0.9338235, b: 0.9338235, a: 0.784} - m_RaycastTarget: 1 - m_OnCullStateChanged: - m_PersistentCalls: - m_Calls: [] - m_TypeName: UnityEngine.UI.MaskableGraphic+CullStateChangedEvent, UnityEngine.UI, - Version=1.0.0.0, Culture=neutral, PublicKeyToken=null - m_FontData: - m_Font: {fileID: 10102, guid: 0000000000000000e000000000000000, type: 0} - m_FontSize: 300 - m_FontStyle: 1 - m_BestFit: 0 - m_MinSize: 8 - m_MaxSize: 300 - m_Alignment: 4 - m_AlignByGeometry: 0 - m_RichText: 1 - m_HorizontalOverflow: 0 - m_VerticalOverflow: 0 - m_LineSpacing: 1 - m_Text: NOM ---- !u!222 &273651481 -CanvasRenderer: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 273651478} ---- !u!1 &378228137 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 378228141} - - component: {fileID: 378228140} - - component: {fileID: 378228139} - - component: {fileID: 378228138} - m_Layer: 5 - m_Name: Canvas - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!114 &378228138 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 378228137} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 1301386320, guid: f70555f144d8491a825f0804e09c671c, type: 3} - m_Name: - m_EditorClassIdentifier: - m_IgnoreReversedGraphics: 1 - m_BlockingObjects: 0 - m_BlockingMask: - serializedVersion: 2 - m_Bits: 4294967295 ---- !u!114 &378228139 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 378228137} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: 
{fileID: 1980459831, guid: f70555f144d8491a825f0804e09c671c, type: 3} - m_Name: - m_EditorClassIdentifier: - m_UiScaleMode: 1 - m_ReferencePixelsPerUnit: 100 - m_ScaleFactor: 1 - m_ReferenceResolution: {x: 800, y: 600} - m_ScreenMatchMode: 0 - m_MatchWidthOrHeight: 0.5 - m_PhysicalUnit: 3 - m_FallbackScreenDPI: 96 - m_DefaultSpriteDPI: 96 - m_DynamicPixelsPerUnit: 1 ---- !u!223 &378228140 -Canvas: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 378228137} - m_Enabled: 1 - serializedVersion: 3 - m_RenderMode: 0 - m_Camera: {fileID: 0} - m_PlaneDistance: 100 - m_PixelPerfect: 0 - m_ReceivesEvents: 1 - m_OverrideSorting: 0 - m_OverridePixelPerfect: 0 - m_SortingBucketNormalizedSize: 0 - m_AdditionalShaderChannelsFlag: 0 - m_SortingLayerID: 0 - m_SortingOrder: 0 - m_TargetDisplay: 0 ---- !u!224 &378228141 -RectTransform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 378228137} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 0, y: 0, z: 0} - m_Children: - - {fileID: 1799584681} - - {fileID: 1196437248} - m_Father: {fileID: 0} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} - m_AnchorMin: {x: 0, y: 0} - m_AnchorMax: {x: 0, y: 0} - m_AnchoredPosition: {x: 0, y: 0} - m_SizeDelta: {x: 0, y: 0} - m_Pivot: {x: 0, y: 0} ---- !u!1 &499540684 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 499540687} - - component: {fileID: 499540686} - - component: {fileID: 499540685} - m_Layer: 0 - m_Name: EventSystem - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!114 &499540685 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 
499540684} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 1077351063, guid: f70555f144d8491a825f0804e09c671c, type: 3} - m_Name: - m_EditorClassIdentifier: - m_HorizontalAxis: Horizontal - m_VerticalAxis: Vertical - m_SubmitButton: Submit - m_CancelButton: Cancel - m_InputActionsPerSecond: 10 - m_RepeatDelay: 0.5 - m_ForceModuleActive: 0 ---- !u!114 &499540686 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 499540684} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: -619905303, guid: f70555f144d8491a825f0804e09c671c, type: 3} - m_Name: - m_EditorClassIdentifier: - m_FirstSelected: {fileID: 0} - m_sendNavigationEvents: 1 - m_DragThreshold: 5 ---- !u!4 &499540687 -Transform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 499540684} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 0} - m_RootOrder: 2 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1001 &974906832 -Prefab: - m_ObjectHideFlags: 0 - serializedVersion: 2 - m_Modification: - m_TransformParent: {fileID: 0} - m_Modifications: - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalPosition.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalPosition.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalPosition.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalRotation.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, 
guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalRotation.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalRotation.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalRotation.w - value: 1 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_RootOrder - value: 6 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_AnchoredPosition.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_AnchoredPosition.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_SizeDelta.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_SizeDelta.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_AnchorMin.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_AnchorMin.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_AnchorMax.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_AnchorMax.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 
3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_Pivot.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_Pivot.y - value: 0 - objectReference: {fileID: 0} - m_RemovedComponents: [] - m_ParentPrefab: {fileID: 100100000, guid: 3ce107b4a79bc4eef83afde434932a68, type: 2} - m_IsPrefabParent: 0 ---- !u!1 &1196437247 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 1196437248} - - component: {fileID: 1196437250} - - component: {fileID: 1196437249} - m_Layer: 5 - m_Name: Stats - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!224 &1196437248 -RectTransform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1196437247} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 378228141} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} - m_AnchorMin: {x: 0.5, y: 0.5} - m_AnchorMax: {x: 0.5, y: 0.5} - m_AnchoredPosition: {x: 282.4, y: -170.26} - m_SizeDelta: {x: 300, y: 30} - m_Pivot: {x: 0.5, y: 0.5} ---- !u!114 &1196437249 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1196437247} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 708705254, guid: f70555f144d8491a825f0804e09c671c, type: 3} - m_Name: - m_EditorClassIdentifier: - m_Material: {fileID: 0} - m_Color: {r: 1, g: 1, b: 1, a: 1} - m_RaycastTarget: 1 - m_OnCullStateChanged: - m_PersistentCalls: - m_Calls: [] - m_TypeName: UnityEngine.UI.MaskableGraphic+CullStateChangedEvent, UnityEngine.UI, - Version=1.0.0.0, Culture=neutral, PublicKeyToken=null - m_FontData: 
- m_Font: {fileID: 10102, guid: 0000000000000000e000000000000000, type: 0} - m_FontSize: 23 - m_FontStyle: 0 - m_BestFit: 0 - m_MinSize: 10 - m_MaxSize: 40 - m_Alignment: 1 - m_AlignByGeometry: 0 - m_RichText: 1 - m_HorizontalOverflow: 0 - m_VerticalOverflow: 0 - m_LineSpacing: 1 - m_Text: 'Total Reward: 0' ---- !u!222 &1196437250 -CanvasRenderer: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1196437247} ---- !u!1001 &1357737440 -Prefab: - m_ObjectHideFlags: 0 - serializedVersion: 2 - m_Modification: - m_TransformParent: {fileID: 0} - m_Modifications: - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalPosition.x - value: 106.38621 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalPosition.y - value: 38.840767 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalPosition.z - value: 34.72934 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalRotation.x - value: 0.31598538 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalRotation.y - value: -0.3596048 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalRotation.z - value: 0.13088542 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalRotation.w - value: 0.8681629 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_RootOrder - value: 5 - objectReference: {fileID: 0} - 
m_RemovedComponents: [] - m_ParentPrefab: {fileID: 100100000, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - m_IsPrefabParent: 0 ---- !u!1 &1570348456 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 1570348460} - - component: {fileID: 1570348459} - m_Layer: 0 - m_Name: Camera - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!20 &1570348459 -Camera: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1570348456} - m_Enabled: 1 - serializedVersion: 2 - m_ClearFlags: 2 - m_BackGroundColor: {r: 0, g: 0, b: 0, a: 0} - m_NormalizedViewPortRect: - serializedVersion: 2 - x: 0 - y: 0 - width: 1 - height: 1 - near clip plane: 0.3 - far clip plane: 1000 - field of view: 60 - orthographic: 0 - orthographic size: 5 - m_Depth: 0 - m_CullingMask: - serializedVersion: 2 - m_Bits: 0 - m_RenderingPath: -1 - m_TargetTexture: {fileID: 0} - m_TargetDisplay: 0 - m_TargetEye: 3 - m_HDR: 1 - m_AllowMSAA: 1 - m_AllowDynamicResolution: 0 - m_ForceIntoRT: 0 - m_OcclusionCulling: 1 - m_StereoConvergence: 10 - m_StereoSeparation: 0.022 ---- !u!4 &1570348460 -Transform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1570348456} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: -12.999352, y: -5.8986187, z: 24.784279} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 0} - m_RootOrder: 4 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &1574236047 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 1574236049} - - component: {fileID: 1574236048} - m_Layer: 0 - m_Name: Academy - m_TagString: Untagged - m_Icon: {fileID: 0} - 
m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!114 &1574236048 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1574236047} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 4fe57113e76a5426297487dd6faadc5b, type: 3} - m_Name: - m_EditorClassIdentifier: - broadcastHub: - broadcastingBrains: - - {fileID: 11400000, guid: 9e7865ec29c894c2d8c1617b0fa392f9, type: 2} - - {fileID: 11400000, guid: dff7429d656234fed84c4fac2a7a683c, type: 2} - m_BrainsToControl: [] - m_MaxSteps: 1500 - m_TrainingConfiguration: - width: 500 - height: 500 - qualityLevel: 0 - timeScale: 15 - targetFrameRate: -1 - m_InferenceConfiguration: - width: 1280 - height: 720 - qualityLevel: 5 - timeScale: 1 - targetFrameRate: 60 - resetParameters: - m_ResetParameters: - - key: laser_length - value: 1 - - key: agent_scale - value: 1 - agents: [] - listArea: [] - totalScore: 0 - scoreText: {fileID: 1196437249} ---- !u!4 &1574236049 -Transform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1574236047} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0.71938086, y: 0.27357092, z: 4.1970553} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 0} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &1799584680 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 1799584681} - - component: {fileID: 1799584683} - - component: {fileID: 1799584682} - m_Layer: 5 - m_Name: Panel - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 0 ---- !u!224 &1799584681 -RectTransform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1799584680} - 
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 273651479} - m_Father: {fileID: 378228141} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} - m_AnchorMin: {x: 0, y: 0} - m_AnchorMax: {x: 1, y: 1} - m_AnchoredPosition: {x: 0, y: 0} - m_SizeDelta: {x: 0, y: 0} - m_Pivot: {x: 0.5, y: 0.5} ---- !u!114 &1799584682 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1799584680} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: -765806418, guid: f70555f144d8491a825f0804e09c671c, type: 3} - m_Name: - m_EditorClassIdentifier: - m_Material: {fileID: 0} - m_Color: {r: 0, g: 0, b: 0, a: 0.472} - m_RaycastTarget: 1 - m_OnCullStateChanged: - m_PersistentCalls: - m_Calls: [] - m_TypeName: UnityEngine.UI.MaskableGraphic+CullStateChangedEvent, UnityEngine.UI, - Version=1.0.0.0, Culture=neutral, PublicKeyToken=null - m_Sprite: {fileID: 10907, guid: 0000000000000000f000000000000000, type: 0} - m_Type: 1 - m_PreserveAspect: 0 - m_FillCenter: 1 - m_FillMethod: 4 - m_FillAmount: 1 - m_FillClockwise: 1 - m_FillOrigin: 0 ---- !u!222 &1799584683 -CanvasRenderer: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1799584680} ---- !u!1001 &2043682756 -Prefab: - m_ObjectHideFlags: 0 - serializedVersion: 2 - m_Modification: - m_TransformParent: {fileID: 0} - m_Modifications: - - target: {fileID: 4372429183926998, guid: 4e556f5e95e27473da078d43fcea9c54, type: 2} - propertyPath: m_LocalPosition.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4372429183926998, guid: 4e556f5e95e27473da078d43fcea9c54, type: 2} - propertyPath: m_LocalPosition.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4372429183926998, guid: 4e556f5e95e27473da078d43fcea9c54, type: 2} - propertyPath: m_LocalPosition.z - value: 0 - 
objectReference: {fileID: 0} - - target: {fileID: 4372429183926998, guid: 4e556f5e95e27473da078d43fcea9c54, type: 2} - propertyPath: m_LocalRotation.x - value: -0 - objectReference: {fileID: 0} - - target: {fileID: 4372429183926998, guid: 4e556f5e95e27473da078d43fcea9c54, type: 2} - propertyPath: m_LocalRotation.y - value: -0 - objectReference: {fileID: 0} - - target: {fileID: 4372429183926998, guid: 4e556f5e95e27473da078d43fcea9c54, type: 2} - propertyPath: m_LocalRotation.z - value: -0 - objectReference: {fileID: 0} - - target: {fileID: 4372429183926998, guid: 4e556f5e95e27473da078d43fcea9c54, type: 2} - propertyPath: m_LocalRotation.w - value: 1 - objectReference: {fileID: 0} - - target: {fileID: 4372429183926998, guid: 4e556f5e95e27473da078d43fcea9c54, type: 2} - propertyPath: m_RootOrder - value: 7 - objectReference: {fileID: 0} - m_RemovedComponents: [] - m_ParentPrefab: {fileID: 100100000, guid: 4e556f5e95e27473da078d43fcea9c54, type: 2} - m_IsPrefabParent: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Scenes/FoodCollectorIL.unity.meta b/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Scenes/FoodCollectorIL.unity.meta deleted file mode 100644 index 5bec6b5b15..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Scenes/FoodCollectorIL.unity.meta +++ /dev/null @@ -1,9 +0,0 @@ -fileFormatVersion: 2 -guid: 3ae10073cde7641f488ef7c87862333a -timeCreated: 1517881609 -licenseType: Pro -DefaultImporter: - externalObjects: {} - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Scenes/VisualFoodCollector.unity b/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Scenes/VisualFoodCollector.unity index 4bd5b6c8ab..2512ad5616 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Scenes/VisualFoodCollector.unity +++ b/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Scenes/VisualFoodCollector.unity @@ -383,6 +383,48 @@ Prefab: m_RemovedComponents: [] m_ParentPrefab: 
{fileID: 100100000, guid: 3ce107b4a79bc4eef83afde434932a68, type: 2} m_IsPrefabParent: 0 +--- !u!1001 &868060419 +Prefab: + m_ObjectHideFlags: 0 + serializedVersion: 2 + m_Modification: + m_TransformParent: {fileID: 0} + m_Modifications: + - target: {fileID: 4307641258646068, guid: c85b585836e104587b4efdc4d8b9d62b, type: 2} + propertyPath: m_LocalPosition.x + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 4307641258646068, guid: c85b585836e104587b4efdc4d8b9d62b, type: 2} + propertyPath: m_LocalPosition.y + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 4307641258646068, guid: c85b585836e104587b4efdc4d8b9d62b, type: 2} + propertyPath: m_LocalPosition.z + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 4307641258646068, guid: c85b585836e104587b4efdc4d8b9d62b, type: 2} + propertyPath: m_LocalRotation.x + value: -0 + objectReference: {fileID: 0} + - target: {fileID: 4307641258646068, guid: c85b585836e104587b4efdc4d8b9d62b, type: 2} + propertyPath: m_LocalRotation.y + value: -0 + objectReference: {fileID: 0} + - target: {fileID: 4307641258646068, guid: c85b585836e104587b4efdc4d8b9d62b, type: 2} + propertyPath: m_LocalRotation.z + value: -0 + objectReference: {fileID: 0} + - target: {fileID: 4307641258646068, guid: c85b585836e104587b4efdc4d8b9d62b, type: 2} + propertyPath: m_LocalRotation.w + value: 1 + objectReference: {fileID: 0} + - target: {fileID: 4307641258646068, guid: c85b585836e104587b4efdc4d8b9d62b, type: 2} + propertyPath: m_RootOrder + value: 5 + objectReference: {fileID: 0} + m_RemovedComponents: [] + m_ParentPrefab: {fileID: 100100000, guid: c85b585836e104587b4efdc4d8b9d62b, type: 2} + m_IsPrefabParent: 0 --- !u!1 &1009000883 GameObject: m_ObjectHideFlags: 0 @@ -448,53 +490,6 @@ Camera: m_OcclusionCulling: 1 m_StereoConvergence: 10 m_StereoSeparation: 0.022 ---- !u!1001 &1081822017 -Prefab: - m_ObjectHideFlags: 0 - serializedVersion: 2 - m_Modification: - m_TransformParent: {fileID: 0} - m_Modifications: - - target: 
{fileID: 4612263362188236, guid: c85b585836e104587b4efdc4d8b9d62b, type: 2} - propertyPath: m_LocalPosition.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4612263362188236, guid: c85b585836e104587b4efdc4d8b9d62b, type: 2} - propertyPath: m_LocalPosition.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4612263362188236, guid: c85b585836e104587b4efdc4d8b9d62b, type: 2} - propertyPath: m_LocalPosition.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4612263362188236, guid: c85b585836e104587b4efdc4d8b9d62b, type: 2} - propertyPath: m_LocalRotation.x - value: -0 - objectReference: {fileID: 0} - - target: {fileID: 4612263362188236, guid: c85b585836e104587b4efdc4d8b9d62b, type: 2} - propertyPath: m_LocalRotation.y - value: -0 - objectReference: {fileID: 0} - - target: {fileID: 4612263362188236, guid: c85b585836e104587b4efdc4d8b9d62b, type: 2} - propertyPath: m_LocalRotation.z - value: -0 - objectReference: {fileID: 0} - - target: {fileID: 4612263362188236, guid: c85b585836e104587b4efdc4d8b9d62b, type: 2} - propertyPath: m_LocalRotation.w - value: 1 - objectReference: {fileID: 0} - - target: {fileID: 4612263362188236, guid: c85b585836e104587b4efdc4d8b9d62b, type: 2} - propertyPath: m_RootOrder - value: 5 - objectReference: {fileID: 0} - - target: {fileID: 23446453883495642, guid: c85b585836e104587b4efdc4d8b9d62b, - type: 2} - propertyPath: m_Materials.Array.data[0] - value: - objectReference: {fileID: 2100000, guid: 580f2003972f64189826f085e2498080, type: 3} - m_RemovedComponents: [] - m_ParentPrefab: {fileID: 100100000, guid: c85b585836e104587b4efdc4d8b9d62b, type: 2} - m_IsPrefabParent: 0 --- !u!1 &1086444495 GameObject: m_ObjectHideFlags: 0 @@ -642,11 +637,6 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 4fe57113e76a5426297487dd6faadc5b, type: 3} m_Name: m_EditorClassIdentifier: - broadcastHub: - broadcastingBrains: - - {fileID: 11400000, guid: 24e823594179d48189b2c78003c50ce0, type: 2} - m_BrainsToControl: [] - 
m_MaxSteps: 0 m_TrainingConfiguration: width: 500 height: 500 diff --git a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Scripts/FoodCollectorAgent.cs b/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Scripts/FoodCollectorAgent.cs index 7c72ccfcf8..c00e27adb0 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Scripts/FoodCollectorAgent.cs +++ b/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/Scripts/FoodCollectorAgent.cs @@ -91,54 +91,45 @@ public void MoveAgent(float[] act) if (!m_Frozen) { var shootCommand = false; - if (brain.brainParameters.vectorActionSpaceType == SpaceType.Continuous) + var forwardAxis = (int)act[0]; + var rightAxis = (int)act[1]; + var rotateAxis = (int)act[2]; + var shootAxis = (int)act[3]; + + switch (forwardAxis) { - dirToGo = transform.forward * Mathf.Clamp(act[0], -1f, 1f); - rotateDir = transform.up * Mathf.Clamp(act[1], -1f, 1f); - shootCommand = Mathf.Clamp(act[2], -1f, 1f) > 0.5f; + case 1: + dirToGo = transform.forward; + break; + case 2: + dirToGo = -transform.forward; + break; } - else - { - var forwardAxis = (int)act[0]; - var rightAxis = (int)act[1]; - var rotateAxis = (int)act[2]; - var shootAxis = (int)act[3]; - switch (forwardAxis) - { - case 1: - dirToGo = transform.forward; - break; - case 2: - dirToGo = -transform.forward; - break; - } - - switch (rightAxis) - { - case 1: - dirToGo = transform.right; - break; - case 2: - dirToGo = -transform.right; - break; - } + switch (rightAxis) + { + case 1: + dirToGo = transform.right; + break; + case 2: + dirToGo = -transform.right; + break; + } - switch (rotateAxis) - { - case 1: - rotateDir = -transform.up; - break; - case 2: - rotateDir = transform.up; - break; - } - switch (shootAxis) - { - case 1: - shootCommand = true; - break; - } + switch (rotateAxis) + { + case 1: + rotateDir = -transform.up; + break; + case 2: + rotateDir = transform.up; + break; + } + switch (shootAxis) + { + case 1: + shootCommand = true; + break; } if (shootCommand) { @@ -222,6 +213,29 
@@ public override void AgentAction(float[] vectorAction, string textAction) MoveAgent(vectorAction); } + public override float[] Heuristic() + { + var action = new float[4]; + if (Input.GetKey(KeyCode.D)) + { + action[2] = 2f; + } + if (Input.GetKey(KeyCode.W)) + { + action[0] = 1f; + } + if (Input.GetKey(KeyCode.A)) + { + action[2] = 1f; + } + if (Input.GetKey(KeyCode.S)) + { + action[0] = 2f; + } + action[3] = Input.GetKey(KeyCode.Space) ? 1.0f : 0.0f; + return action; + } + public override void AgentReset() { Unfreeze(); diff --git a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/TFModels/FoodCollector.nn b/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/TFModels/FoodCollector.nn new file mode 100644 index 0000000000..6876fd633f Binary files /dev/null and b/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/TFModels/FoodCollector.nn differ diff --git a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/TFModels/FoodCollector.nn.meta b/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/TFModels/FoodCollector.nn.meta new file mode 100644 index 0000000000..3f0e2820ad --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/TFModels/FoodCollector.nn.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: 36ab3e93020504f48858d0856f939685 +ScriptedImporter: + userData: + assetBundleName: + assetBundleVariant: + script: {fileID: 11500000, guid: 19ed1486aa27d4903b34839f37b8f69f, type: 3} diff --git a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/TFModels/FoodCollectorLearning.nn b/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/TFModels/FoodCollectorLearning.nn deleted file mode 100644 index cb64b58e6e..0000000000 Binary files a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/TFModels/FoodCollectorLearning.nn and /dev/null differ diff --git a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/TFModels/FoodCollectorLearning.nn.meta b/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/TFModels/FoodCollectorLearning.nn.meta deleted file mode 100644 index 
44b7eb4548..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/FoodCollector/TFModels/FoodCollectorLearning.nn.meta +++ /dev/null @@ -1,7 +0,0 @@ -fileFormatVersion: 2 -guid: d32fca21cf4c04536ab7f88eb9de83e0 -ScriptedImporter: - userData: - assetBundleName: - assetBundleVariant: - script: {fileID: 11500000, guid: 19ed1486aa27d4903b34839f37b8f69f, type: 3} diff --git a/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Brains.meta b/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Brains.meta deleted file mode 100644 index 9b02d23635..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Brains.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 157deb51cff8d4ad7a8d031c8647f094 -folderAsset: yes -DefaultImporter: - externalObjects: {} - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Brains/GridWorldLearning.asset b/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Brains/GridWorldLearning.asset deleted file mode 100644 index 4519f4aaa2..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Brains/GridWorldLearning.asset +++ /dev/null @@ -1,26 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 8b23992c8eb17439887f5e944bf04a40, type: 3} - m_Name: GridWorldLearning - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 0 - numStackedVectorObservations: 1 - vectorActionSize: 05000000 - cameraResolutions: - - width: 84 - height: 84 - blackAndWhite: 0 - vectorActionDescriptions: - - - vectorActionSpaceType: 0 - model: {fileID: 11400000, guid: 07afbd1d35ed345eeb850fcbb59eae0b, type: 3} - inferenceDevice: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Brains/GridWorldLearning.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Brains/GridWorldLearning.asset.meta deleted file mode 100644 index 68d7e75ef0..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Brains/GridWorldLearning.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 2c1d51b7167874f31beda0b0cf0af468 -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Brains/GridWorldPlayer.asset b/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Brains/GridWorldPlayer.asset deleted file mode 100644 index 9c8c3729f7..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Brains/GridWorldPlayer.asset +++ /dev/null @@ -1,39 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 41e9bda8f3cf1492fa74926a530f6f70, type: 3} - m_Name: GridWorldPlayer - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 0 - numStackedVectorObservations: 1 - vectorActionSize: 05000000 - cameraResolutions: - - width: 84 - height: 84 - blackAndWhite: 0 - vectorActionDescriptions: - - - vectorActionSpaceType: 0 - keyContinuousPlayerActions: [] - axisContinuousPlayerActions: [] - discretePlayerActions: - - key: 273 - branchIndex: 0 - value: 1 - - key: 274 - branchIndex: 0 - value: 2 - - key: 276 - branchIndex: 0 - value: 3 - - key: 275 - branchIndex: 0 - value: 4 diff --git a/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Brains/GridWorldPlayer.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Brains/GridWorldPlayer.asset.meta deleted file mode 100644 index 1278d44562..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Brains/GridWorldPlayer.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 8096722eb0a294871857e202e0032082 -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Prefabs/PushBlockTeacherArea.prefab b/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Prefabs/Area.prefab similarity index 58% rename from UnitySDK/Assets/ML-Agents/Examples/PushBlock/Prefabs/PushBlockTeacherArea.prefab rename to UnitySDK/Assets/ML-Agents/Examples/GridWorld/Prefabs/Area.prefab index 81ae5d9f42..7d6c5073a9 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Prefabs/PushBlockTeacherArea.prefab +++ b/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Prefabs/Area.prefab @@ -9,397 +9,428 @@ Prefab: m_Modifications: [] m_RemovedComponents: [] 
m_ParentPrefab: {fileID: 0} - m_RootGameObject: {fileID: 1998262265628184} + m_RootGameObject: {fileID: 1625008366184734} m_IsPrefabParent: 1 ---- !u!1 &1050940377068380 +--- !u!1 &1220141488340396 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4380521747867302} - - component: {fileID: 20489154120297920} + - component: {fileID: 4007504045862718} + - component: {fileID: 33222498917940530} + - component: {fileID: 65623874337418966} + - component: {fileID: 23631786362770220} m_Layer: 0 - m_Name: AgentCamera - m_TagString: Untagged + m_Name: sS + m_TagString: wall m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 - m_IsActive: 0 ---- !u!1 &1065140995714380 + m_IsActive: 1 +--- !u!1 &1324124466577712 GameObject: m_ObjectHideFlags: 0 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4804205853184492} - - component: {fileID: 33841258435083658} - - component: {fileID: 65463547633449572} - - component: {fileID: 23070782658444740} - - component: {fileID: 54186038127787192} - - component: {fileID: 114981596078727782} + - component: {fileID: 4036590373541758} m_Layer: 0 - m_Name: Block - m_TagString: block + m_Name: scene + m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1107013619425306 +--- !u!1 &1376404096451388 GameObject: - m_ObjectHideFlags: 0 + m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4275118890839636} - - component: {fileID: 33762099158972748} - - component: {fileID: 23635576611763492} - - component: {fileID: 65043724339313280} + - component: {fileID: 4206155882393090} + - component: {fileID: 33152597024021804} + - component: {fileID: 65829039252076494} + - component: {fileID: 23027717151182946} 
m_Layer: 0 - m_Name: Goal - m_TagString: goal + m_Name: Cube (1) + m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1111462179877322 +--- !u!1 &1394424645070404 GameObject: - m_ObjectHideFlags: 1 + m_ObjectHideFlags: 0 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4969137272367842} - - component: {fileID: 33578090780189410} - - component: {fileID: 23291080264910442} + - component: {fileID: 4890346887087870} + - component: {fileID: 20743940359151984} m_Layer: 0 - m_Name: eye + m_Name: agentCam m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1117908652629434 +--- !u!1 &1488387672112076 GameObject: m_ObjectHideFlags: 0 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4983497294429112} - - component: {fileID: 33806912783577538} - - component: {fileID: 23760568518666472} - - component: {fileID: 65606732635901800} - m_Layer: 0 - m_Name: Ground - m_TagString: ground + - component: {fileID: 4034807106460652} + - component: {fileID: 33823710649932060} + - component: {fileID: 65073501172061214} + - component: {fileID: 114935253044749092} + - component: {fileID: 114650561397225712} + - component: {fileID: 114889700908650620} + m_Layer: 8 + m_Name: Agent + m_TagString: agent m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1154602294442548 +--- !u!1 &1625008366184734 GameObject: - m_ObjectHideFlags: 1 + m_ObjectHideFlags: 0 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4221095405773878} - - component: {fileID: 33218117416258202} - - component: {fileID: 23200813283054622} + - component: {fileID: 4124767863011510} + - component: {fileID: 114704252266302846} m_Layer: 0 - m_Name: 
Headband + m_Name: Area m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1389796355382970 +--- !u!1 &1656910849934022 GameObject: - m_ObjectHideFlags: 0 + m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4484965669593502} - - component: {fileID: 33147913423988468} - - component: {fileID: 23929017032951708} - - component: {fileID: 65946719936341988} - - component: {fileID: 65298478788444986} - - component: {fileID: 65640529576786566} - - component: {fileID: 65700895826228724} + - component: {fileID: 4399229758781002} + - component: {fileID: 33099526047820694} + - component: {fileID: 65461269218509740} + - component: {fileID: 23289473826438240} m_Layer: 0 - m_Name: WallsOuter + m_Name: sW m_TagString: wall m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1763784100511744 +--- !u!1 &1659433968697430 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4966454211646196} - - component: {fileID: 33561504467589168} - - component: {fileID: 23690109511573182} + - component: {fileID: 4269224463113992} + - component: {fileID: 33813191588135886} + - component: {fileID: 65922560747716658} + - component: {fileID: 23740566138369150} m_Layer: 0 - m_Name: mouth + m_Name: Cube m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1828199053218262 +--- !u!1 &1696233368146792 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4482771625505758} - - component: {fileID: 33679690945918774} - - component: {fileID: 23670272114869376} + - component: {fileID: 4616835664324708} + - component: {fileID: 33825707296015202} + - 
component: {fileID: 65020466233620882} + - component: {fileID: 23205928308810388} m_Layer: 0 - m_Name: eye + m_Name: Cube (3) m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1944730196513194 +--- !u!1 &1805019891541352 GameObject: - m_ObjectHideFlags: 0 + m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4470080690475952} - - component: {fileID: 54604342782839414} - - component: {fileID: 114200042090600512} - - component: {fileID: 114729166155037006} - - component: {fileID: 65582940058808712} + - component: {fileID: 4041386967126538} + - component: {fileID: 33172111281575160} + - component: {fileID: 65321351298141546} + - component: {fileID: 23200653194041678} m_Layer: 0 - m_Name: Agent - m_TagString: agent + m_Name: Cube (2) + m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1984370730071604 +--- !u!1 &1817050562469182 GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4367610518221326} - - component: {fileID: 33101896306102620} - - component: {fileID: 23924406224953340} + - component: {fileID: 4088684435237278} + - component: {fileID: 33550006272110778} + - component: {fileID: 65782631683949718} + - component: {fileID: 23048682015641784} m_Layer: 0 - m_Name: AgentCube_Blue - m_TagString: Untagged + m_Name: sE + m_TagString: wall m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1998262265628184 +--- !u!1 &1881546218232006 GameObject: - m_ObjectHideFlags: 0 + m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} serializedVersion: 5 m_Component: - - component: {fileID: 4086282048714158} - m_Layer: 0 - m_Name: PushBlockTeacherArea + - component: {fileID: 
4558725385767926} + - component: {fileID: 33934167732208046} + - component: {fileID: 64291102267821286} + - component: {fileID: 23096611355272904} + m_Layer: 8 + m_Name: Plane m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!4 &4086282048714158 -Transform: +--- !u!1 &1898983423426052 +GameObject: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1998262265628184} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: -15, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 4470080690475952} - - {fileID: 4275118890839636} - - {fileID: 4983497294429112} - - {fileID: 4484965669593502} - - {fileID: 4804205853184492} - m_Father: {fileID: 0} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4221095405773878 + serializedVersion: 5 + m_Component: + - component: {fileID: 4479182187388718} + - component: {fileID: 33572314435256338} + - component: {fileID: 65944324207520424} + - component: {fileID: 23171092457376468} + m_Layer: 0 + m_Name: sN + m_TagString: wall + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &4007504045862718 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1154602294442548} - m_LocalRotation: {x: -0, y: -0, z: 0.016506119, w: 0.9998638} - m_LocalPosition: {x: 0, y: 0.341, z: 0} - m_LocalScale: {x: 1.0441425, y: 0.19278127, z: 1.0441422} + m_GameObject: {fileID: 1220141488340396} + m_LocalRotation: {x: -0, y: 0.7071068, z: -0, w: 0.7071068} + m_LocalPosition: {x: 0, y: 0, z: -1} + m_LocalScale: {x: 1, y: 0.5, z: 2} m_Children: [] - m_Father: {fileID: 4367610518221326} + m_Father: {fileID: 4036590373541758} m_RootOrder: 4 - m_LocalEulerAnglesHint: {x: 0, y: -179.99998, z: 1.8920001} ---- !u!4 &4275118890839636 + m_LocalEulerAnglesHint: {x: 0, y: 90, z: 0} 
+--- !u!4 &4034807106460652 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1107013619425306} - m_LocalRotation: {x: 0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: -0, y: -0.03, z: -10.5} - m_LocalScale: {x: 0.01, y: 0.01, z: 0.010748733} - m_Children: [] - m_Father: {fileID: 4086282048714158} + m_GameObject: {fileID: 1488387672112076} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 0, y: 0, z: 0} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 4269224463113992} + - {fileID: 4206155882393090} + - {fileID: 4041386967126538} + - {fileID: 4616835664324708} + m_Father: {fileID: 4124767863011510} m_RootOrder: 1 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4367610518221326 +--- !u!4 &4036590373541758 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1984370730071604} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} + m_GameObject: {fileID: 1324124466577712} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} m_LocalPosition: {x: 0, y: 0, z: 0} m_LocalScale: {x: 1, y: 1, z: 1} m_Children: - - {fileID: 4380521747867302} - - {fileID: 4482771625505758} - - {fileID: 4969137272367842} - - {fileID: 4966454211646196} - - {fileID: 4221095405773878} - m_Father: {fileID: 4470080690475952} + - {fileID: 4558725385767926} + - {fileID: 4088684435237278} + - {fileID: 4399229758781002} + - {fileID: 4479182187388718} + - {fileID: 4007504045862718} + m_Father: {fileID: 4124767863011510} m_RootOrder: 0 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4380521747867302 +--- !u!4 &4041386967126538 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1050940377068380} + m_GameObject: {fileID: 1805019891541352} m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0.15} - m_LocalScale: {x: 1, y: 1, 
z: 1} + m_LocalPosition: {x: 0, y: 0, z: -0.4} + m_LocalScale: {x: 1, y: 1, z: 0.2} m_Children: [] - m_Father: {fileID: 4367610518221326} - m_RootOrder: 0 + m_Father: {fileID: 4034807106460652} + m_RootOrder: 2 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4470080690475952 +--- !u!4 &4088684435237278 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1944730196513194} - m_LocalRotation: {x: 0, y: 1, z: 0, w: 0} - m_LocalPosition: {x: 0, y: 1, z: 3} + m_GameObject: {fileID: 1817050562469182} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 1, y: 0, z: 0} + m_LocalScale: {x: 1, y: 0.5, z: 2} + m_Children: [] + m_Father: {fileID: 4036590373541758} + m_RootOrder: 1 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!4 &4124767863011510 +Transform: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1625008366184734} + m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} + m_LocalPosition: {x: 0, y: 0, z: 0} m_LocalScale: {x: 1, y: 1, z: 1} m_Children: - - {fileID: 4367610518221326} - m_Father: {fileID: 4086282048714158} + - {fileID: 4036590373541758} + - {fileID: 4034807106460652} + - {fileID: 4890346887087870} + m_Father: {fileID: 0} m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4482771625505758 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!4 &4206155882393090 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1828199053218262} - m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} - m_LocalPosition: {x: 0.29999995, y: 0.07399994, z: 0.50040054} - m_LocalScale: {x: 0.29457998, y: 0.29457998, z: 0.29457998} + m_GameObject: {fileID: 1376404096451388} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 0.4, y: 0, z: 0} + m_LocalScale: {x: 0.2, y: 1, z: 1} m_Children: [] - m_Father: 
{fileID: 4367610518221326} + m_Father: {fileID: 4034807106460652} m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4484965669593502 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!4 &4269224463113992 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1389796355382970} - m_LocalRotation: {x: 0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: -0, y: 0, z: 0} - m_LocalScale: {x: 0.01, y: 0.01, z: 0.01} + m_GameObject: {fileID: 1659433968697430} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: -0.4, y: 0, z: 0} + m_LocalScale: {x: 0.2, y: 1, z: 1} m_Children: [] - m_Father: {fileID: 4086282048714158} - m_RootOrder: 3 + m_Father: {fileID: 4034807106460652} + m_RootOrder: 0 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4804205853184492 +--- !u!4 &4399229758781002 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1065140995714380} + m_GameObject: {fileID: 1656910849934022} m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 2, y: 1, z: -1.43} - m_LocalScale: {x: 2, y: 0.75, z: 2} + m_LocalPosition: {x: -1, y: 0, z: 0} + m_LocalScale: {x: 1, y: 0.5, z: 2} m_Children: [] - m_Father: {fileID: 4086282048714158} - m_RootOrder: 4 + m_Father: {fileID: 4036590373541758} + m_RootOrder: 2 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4966454211646196 +--- !u!4 &4479182187388718 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1763784100511744} - m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} - m_LocalPosition: {x: 0, y: -0.18299997, z: 0.50040054} - m_LocalScale: {x: 0.27602, y: 0.042489994, z: 0.13891} + m_GameObject: {fileID: 1898983423426052} + m_LocalRotation: {x: -0, y: 0.7071068, z: -0, w: 0.7071068} + m_LocalPosition: {x: 0, y: 0, z: 1} + m_LocalScale: {x: 1, y: 0.5, z: 
2} m_Children: [] - m_Father: {fileID: 4367610518221326} + m_Father: {fileID: 4036590373541758} m_RootOrder: 3 - m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4969137272367842 + m_LocalEulerAnglesHint: {x: 0, y: 90, z: 0} +--- !u!4 &4558725385767926 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1111462179877322} - m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} - m_LocalPosition: {x: -0.29999995, y: 0.07399994, z: 0.50040054} - m_LocalScale: {x: 0.29457998, y: 0.29457998, z: 0.29457998} + m_GameObject: {fileID: 1881546218232006} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 0, y: -0.5, z: 0} + m_LocalScale: {x: 0.1, y: 0.1, z: 0.1} m_Children: [] - m_Father: {fileID: 4367610518221326} - m_RootOrder: 2 - m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4983497294429112 + m_Father: {fileID: 4036590373541758} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!4 &4616835664324708 Transform: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1117908652629434} - m_LocalRotation: {x: 0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: -0, y: 0, z: -0.00000030517577} - m_LocalScale: {x: 0.01, y: 0.01, z: 0.01} + m_GameObject: {fileID: 1696233368146792} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 0, y: 0, z: 0.4} + m_LocalScale: {x: 1, y: 1, z: 0.2} m_Children: [] - m_Father: {fileID: 4086282048714158} - m_RootOrder: 2 + m_Father: {fileID: 4034807106460652} + m_RootOrder: 3 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!20 &20489154120297920 +--- !u!4 &4890346887087870 +Transform: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1394424645070404} + m_LocalRotation: {x: 0.7071068, y: -0, z: -0, w: 0.7071068} + m_LocalPosition: {x: 0, y: 5, z: 0} + m_LocalScale: {x: 1, y: 1, 
z: 1} + m_Children: [] + m_Father: {fileID: 4124767863011510} + m_RootOrder: 2 + m_LocalEulerAnglesHint: {x: 90, y: 0, z: 0} +--- !u!20 &20743940359151984 Camera: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1050940377068380} + m_GameObject: {fileID: 1394424645070404} m_Enabled: 1 serializedVersion: 2 - m_ClearFlags: 2 - m_BackGroundColor: {r: 0.46666667, g: 0.5647059, b: 0.60784316, a: 1} + m_ClearFlags: 1 + m_BackGroundColor: {r: 0.19215687, g: 0.3019608, b: 0.4745098, a: 0} m_NormalizedViewPortRect: serializedVersion: 2 x: 0 @@ -409,29 +440,29 @@ Camera: near clip plane: 0.3 far clip plane: 1000 field of view: 60 - orthographic: 0 + orthographic: 1 orthographic size: 5 m_Depth: 0 m_CullingMask: serializedVersion: 2 - m_Bits: 4294950911 + m_Bits: 4294967295 m_RenderingPath: -1 m_TargetTexture: {fileID: 0} m_TargetDisplay: 0 m_TargetEye: 3 - m_HDR: 1 - m_AllowMSAA: 1 + m_HDR: 0 + m_AllowMSAA: 0 m_AllowDynamicResolution: 0 m_ForceIntoRT: 0 m_OcclusionCulling: 1 m_StereoConvergence: 10 m_StereoSeparation: 0.022 ---- !u!23 &23070782658444740 +--- !u!23 &23027717151182946 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1065140995714380} + m_GameObject: {fileID: 1376404096451388} m_Enabled: 1 m_CastShadows: 1 m_ReceiveShadows: 1 @@ -440,7 +471,7 @@ MeshRenderer: m_LightProbeUsage: 1 m_ReflectionProbeUsage: 1 m_Materials: - - {fileID: 2100000, guid: eaad04b0e0dec42229c9cb00a981d7ac, type: 2} + - {fileID: 2100000, guid: c9fa44c2c3f8ce74ca39a3355ea42631, type: 2} m_StaticBatchInfo: firstSubMesh: 0 subMeshCount: 0 @@ -460,21 +491,21 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!23 &23200813283054622 +--- !u!23 &23048682015641784 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1154602294442548} + 
m_GameObject: {fileID: 1817050562469182} m_Enabled: 1 - m_CastShadows: 1 + m_CastShadows: 0 m_ReceiveShadows: 1 m_DynamicOccludee: 1 m_MotionVectors: 1 m_LightProbeUsage: 1 m_ReflectionProbeUsage: 1 m_Materials: - - {fileID: 2100000, guid: 04be259c590de46f69db4cbd1da877d5, type: 2} + - {fileID: 2100000, guid: 66163cf35956a4be08e801b750c26f33, type: 2} m_StaticBatchInfo: firstSubMesh: 0 subMeshCount: 0 @@ -494,21 +525,21 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!23 &23291080264910442 +--- !u!23 &23096611355272904 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1111462179877322} + m_GameObject: {fileID: 1881546218232006} m_Enabled: 1 - m_CastShadows: 1 + m_CastShadows: 0 m_ReceiveShadows: 1 m_DynamicOccludee: 1 m_MotionVectors: 1 m_LightProbeUsage: 1 m_ReflectionProbeUsage: 1 m_Materials: - - {fileID: 2100000, guid: f731be6866ce749fd8349e67ae81f76a, type: 2} + - {fileID: 2100000, guid: acba6bf2a290a496bb8989b42bf8698d, type: 2} m_StaticBatchInfo: firstSubMesh: 0 subMeshCount: 0 @@ -528,21 +559,21 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!23 &23635576611763492 +--- !u!23 &23171092457376468 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1107013619425306} + m_GameObject: {fileID: 1898983423426052} m_Enabled: 1 - m_CastShadows: 1 + m_CastShadows: 0 m_ReceiveShadows: 1 m_DynamicOccludee: 1 m_MotionVectors: 1 m_LightProbeUsage: 1 m_ReflectionProbeUsage: 1 m_Materials: - - {fileID: 2100000, guid: c67450f290f3e4897bc40276a619e78d, type: 2} + - {fileID: 2100000, guid: 66163cf35956a4be08e801b750c26f33, type: 2} m_StaticBatchInfo: firstSubMesh: 0 subMeshCount: 0 @@ -550,7 +581,7 @@ MeshRenderer: m_ProbeAnchor: {fileID: 0} m_LightProbeVolumeOverride: {fileID: 0} m_ScaleInLightmap: 1 - m_PreserveUVs: 0 + m_PreserveUVs: 1 
m_IgnoreNormalsForChartDetection: 0 m_ImportantGI: 0 m_StitchLightmapSeams: 0 @@ -562,12 +593,12 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!23 &23670272114869376 +--- !u!23 &23200653194041678 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1828199053218262} + m_GameObject: {fileID: 1805019891541352} m_Enabled: 1 m_CastShadows: 1 m_ReceiveShadows: 1 @@ -576,7 +607,7 @@ MeshRenderer: m_LightProbeUsage: 1 m_ReflectionProbeUsage: 1 m_Materials: - - {fileID: 2100000, guid: f731be6866ce749fd8349e67ae81f76a, type: 2} + - {fileID: 2100000, guid: c9fa44c2c3f8ce74ca39a3355ea42631, type: 2} m_StaticBatchInfo: firstSubMesh: 0 subMeshCount: 0 @@ -596,12 +627,12 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!23 &23690109511573182 +--- !u!23 &23205928308810388 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1763784100511744} + m_GameObject: {fileID: 1696233368146792} m_Enabled: 1 m_CastShadows: 1 m_ReceiveShadows: 1 @@ -610,7 +641,7 @@ MeshRenderer: m_LightProbeUsage: 1 m_ReflectionProbeUsage: 1 m_Materials: - - {fileID: 2100000, guid: f731be6866ce749fd8349e67ae81f76a, type: 2} + - {fileID: 2100000, guid: c9fa44c2c3f8ce74ca39a3355ea42631, type: 2} m_StaticBatchInfo: firstSubMesh: 0 subMeshCount: 0 @@ -630,21 +661,21 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!23 &23760568518666472 +--- !u!23 &23289473826438240 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1117908652629434} + m_GameObject: {fileID: 1656910849934022} m_Enabled: 1 - m_CastShadows: 1 + m_CastShadows: 0 m_ReceiveShadows: 1 m_DynamicOccludee: 1 m_MotionVectors: 1 m_LightProbeUsage: 1 m_ReflectionProbeUsage: 1 m_Materials: - - {fileID: 2100000, guid: 
acba6bf2a290a496bb8989b42bf8698d, type: 2} + - {fileID: 2100000, guid: 66163cf35956a4be08e801b750c26f33, type: 2} m_StaticBatchInfo: firstSubMesh: 0 subMeshCount: 0 @@ -652,7 +683,7 @@ MeshRenderer: m_ProbeAnchor: {fileID: 0} m_LightProbeVolumeOverride: {fileID: 0} m_ScaleInLightmap: 1 - m_PreserveUVs: 0 + m_PreserveUVs: 1 m_IgnoreNormalsForChartDetection: 0 m_ImportantGI: 0 m_StitchLightmapSeams: 0 @@ -664,21 +695,21 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!23 &23924406224953340 +--- !u!23 &23631786362770220 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1984370730071604} + m_GameObject: {fileID: 1220141488340396} m_Enabled: 1 - m_CastShadows: 1 + m_CastShadows: 0 m_ReceiveShadows: 1 m_DynamicOccludee: 1 m_MotionVectors: 1 m_LightProbeUsage: 1 m_ReflectionProbeUsage: 1 m_Materials: - - {fileID: 2100000, guid: c9fa44c2c3f8ce74ca39a3355ea42631, type: 2} + - {fileID: 2100000, guid: 66163cf35956a4be08e801b750c26f33, type: 2} m_StaticBatchInfo: firstSubMesh: 0 subMeshCount: 0 @@ -698,21 +729,21 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!23 &23929017032951708 +--- !u!23 &23740566138369150 MeshRenderer: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1389796355382970} + m_GameObject: {fileID: 1659433968697430} m_Enabled: 1 - m_CastShadows: 1 + m_CastShadows: 0 m_ReceiveShadows: 1 m_DynamicOccludee: 1 m_MotionVectors: 1 m_LightProbeUsage: 1 m_ReflectionProbeUsage: 1 m_Materials: - - {fileID: 2100000, guid: 66163cf35956a4be08e801b750c26f33, type: 2} + - {fileID: 2100000, guid: c9fa44c2c3f8ce74ca39a3355ea42631, type: 2} m_StaticBatchInfo: firstSubMesh: 0 subMeshCount: 0 @@ -720,7 +751,7 @@ MeshRenderer: m_ProbeAnchor: {fileID: 0} m_LightProbeVolumeOverride: {fileID: 0} m_ScaleInLightmap: 1 - m_PreserveUVs: 0 + m_PreserveUVs: 1 
m_IgnoreNormalsForChartDetection: 0 m_ImportantGI: 0 m_StitchLightmapSeams: 0 @@ -732,243 +763,268 @@ MeshRenderer: m_SortingLayerID: 0 m_SortingLayer: 0 m_SortingOrder: 0 ---- !u!33 &33101896306102620 +--- !u!33 &33099526047820694 MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1984370730071604} + m_GameObject: {fileID: 1656910849934022} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33147913423988468 +--- !u!33 &33152597024021804 MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1389796355382970} - m_Mesh: {fileID: 4300000, guid: c639386c12f5f7841892163a199dfacc, type: 3} ---- !u!33 &33218117416258202 + m_GameObject: {fileID: 1376404096451388} + m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!33 &33172111281575160 MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1154602294442548} + m_GameObject: {fileID: 1805019891541352} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33561504467589168 +--- !u!33 &33222498917940530 MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1763784100511744} - m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33578090780189410 + m_GameObject: {fileID: 1220141488340396} + m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!33 &33550006272110778 MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1111462179877322} - m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33679690945918774 + m_GameObject: {fileID: 1817050562469182} + m_Mesh: {fileID: 
10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!33 &33572314435256338 MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1828199053218262} - m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33762099158972748 + m_GameObject: {fileID: 1898983423426052} + m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!33 &33813191588135886 MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1107013619425306} - m_Mesh: {fileID: 4300004, guid: c639386c12f5f7841892163a199dfacc, type: 3} ---- !u!33 &33806912783577538 + m_GameObject: {fileID: 1659433968697430} + m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!33 &33823710649932060 MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1117908652629434} - m_Mesh: {fileID: 4300002, guid: c639386c12f5f7841892163a199dfacc, type: 3} ---- !u!33 &33841258435083658 + m_GameObject: {fileID: 1488387672112076} + m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!33 &33825707296015202 MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1065140995714380} + m_GameObject: {fileID: 1696233368146792} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!54 &54186038127787192 -Rigidbody: +--- !u!33 &33934167732208046 +MeshFilter: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1065140995714380} - serializedVersion: 2 - m_Mass: 10 - m_Drag: 0.5 - m_AngularDrag: 0.05 - m_UseGravity: 1 - m_IsKinematic: 0 - m_Interpolate: 0 - m_Constraints: 112 - m_CollisionDetection: 0 ---- !u!54 &54604342782839414 -Rigidbody: + 
m_GameObject: {fileID: 1881546218232006} + m_Mesh: {fileID: 10209, guid: 0000000000000000e000000000000000, type: 0} +--- !u!64 &64291102267821286 +MeshCollider: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1881546218232006} + m_Material: {fileID: 0} + m_IsTrigger: 0 + m_Enabled: 0 + serializedVersion: 3 + m_Convex: 0 + m_CookingOptions: 14 + m_SkinWidth: 0.01 + m_Mesh: {fileID: 10209, guid: 0000000000000000e000000000000000, type: 0} +--- !u!65 &65020466233620882 +BoxCollider: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1944730196513194} + m_GameObject: {fileID: 1696233368146792} + m_Material: {fileID: 0} + m_IsTrigger: 0 + m_Enabled: 0 serializedVersion: 2 - m_Mass: 10 - m_Drag: 4 - m_AngularDrag: 0.05 - m_UseGravity: 1 - m_IsKinematic: 0 - m_Interpolate: 0 - m_Constraints: 112 - m_CollisionDetection: 0 ---- !u!65 &65043724339313280 + m_Size: {x: 1, y: 1, z: 1} + m_Center: {x: 0, y: 0, z: 0} +--- !u!65 &65073501172061214 BoxCollider: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1107013619425306} + m_GameObject: {fileID: 1488387672112076} m_Material: {fileID: 0} m_IsTrigger: 0 m_Enabled: 1 serializedVersion: 2 - m_Size: {x: 2500, y: 5, z: 400} - m_Center: {x: 0, y: 2.5, z: 0} ---- !u!65 &65298478788444986 + m_Size: {x: 1, y: 1, z: 1} + m_Center: {x: 0, y: 0, z: 0} +--- !u!65 &65321351298141546 BoxCollider: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1389796355382970} + m_GameObject: {fileID: 1805019891541352} m_Material: {fileID: 0} m_IsTrigger: 0 - m_Enabled: 1 + m_Enabled: 0 serializedVersion: 2 - m_Size: {x: 50, y: 200, z: 2600} - m_Center: {x: 1275, y: 50, z: 0} ---- !u!65 &65463547633449572 + m_Size: {x: 1, y: 1, z: 1} + m_Center: {x: 0, y: 0, z: 0} +--- !u!65 
&65461269218509740 BoxCollider: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1065140995714380} + m_GameObject: {fileID: 1656910849934022} m_Material: {fileID: 0} m_IsTrigger: 0 m_Enabled: 1 serializedVersion: 2 m_Size: {x: 1, y: 1, z: 1} m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65582940058808712 +--- !u!65 &65623874337418966 BoxCollider: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1944730196513194} + m_GameObject: {fileID: 1220141488340396} m_Material: {fileID: 0} m_IsTrigger: 0 m_Enabled: 1 serializedVersion: 2 m_Size: {x: 1, y: 1, z: 1} m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65606732635901800 +--- !u!65 &65782631683949718 BoxCollider: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1117908652629434} + m_GameObject: {fileID: 1817050562469182} m_Material: {fileID: 0} m_IsTrigger: 0 m_Enabled: 1 serializedVersion: 2 - m_Size: {x: 2500, y: 100, z: 2500} - m_Center: {x: 0, y: -50, z: 0} ---- !u!65 &65640529576786566 + m_Size: {x: 1, y: 1, z: 1} + m_Center: {x: 0, y: 0, z: 0} +--- !u!65 &65829039252076494 BoxCollider: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1389796355382970} + m_GameObject: {fileID: 1376404096451388} m_Material: {fileID: 0} m_IsTrigger: 0 - m_Enabled: 1 + m_Enabled: 0 serializedVersion: 2 - m_Size: {x: 2500, y: 200, z: 50} - m_Center: {x: 0, y: 50, z: -1275} ---- !u!65 &65700895826228724 + m_Size: {x: 1, y: 1, z: 1} + m_Center: {x: 0, y: 0, z: 0} +--- !u!65 &65922560747716658 BoxCollider: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1389796355382970} + m_GameObject: {fileID: 1659433968697430} m_Material: {fileID: 0} m_IsTrigger: 0 - m_Enabled: 1 + m_Enabled: 0 serializedVersion: 2 - 
m_Size: {x: 2500, y: 200, z: 50} - m_Center: {x: 0, y: 50, z: 1275} ---- !u!65 &65946719936341988 + m_Size: {x: 1, y: 1, z: 1} + m_Center: {x: 0, y: 0, z: 0} +--- !u!65 &65944324207520424 BoxCollider: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1389796355382970} + m_GameObject: {fileID: 1898983423426052} m_Material: {fileID: 0} m_IsTrigger: 0 m_Enabled: 1 serializedVersion: 2 - m_Size: {x: 50, y: 200, z: 2600} - m_Center: {x: -1275, y: 50, z: 0} ---- !u!114 &114200042090600512 + m_Size: {x: 1, y: 1, z: 1} + m_Center: {x: 0, y: 0, z: 0} +--- !u!114 &114650561397225712 MonoBehaviour: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1944730196513194} + m_GameObject: {fileID: 1488387672112076} m_Enabled: 1 m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: dea8c4f2604b947e6b7b97750dde87ca, type: 3} + m_Script: {fileID: 11500000, guid: 857707f3f352541d5b858efca4479b95, type: 3} m_Name: m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: dd07b1953eac4411b81fba032f394726, type: 2} agentParameters: - agentCameras: [] - agentRenderTextures: [] - maxStep: 5000 + maxStep: 100 resetOnDone: 1 - onDemandDecision: 0 - numberOfActionsBetweenDecisions: 5 - ground: {fileID: 1117908652629434} - area: {fileID: 1998262265628184} - areaBounds: - m_Center: {x: 0, y: 0, z: 0} - m_Extent: {x: 0, y: 0, z: 0} - goal: {fileID: 1107013619425306} - block: {fileID: 1065140995714380} - goalDetect: {fileID: 0} - useVectorObs: 1 ---- !u!114 &114729166155037006 + onDemandDecision: 1 + numberOfActionsBetweenDecisions: 1 + area: {fileID: 114704252266302846} + timeBetweenDecisionsAtInference: 0.15 + renderCamera: {fileID: 0} + maskActions: 1 +--- !u!114 &114704252266302846 +MonoBehaviour: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1625008366184734} + m_Enabled: 1 + 
m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 676658555cb2d4884aa8285062aab2a1, type: 3} + m_Name: + m_EditorClassIdentifier: + actorObjs: [] + players: + trueAgent: {fileID: 1488387672112076} + goalPref: {fileID: 1508142483324970, guid: 1ec4e4e96e7514d45b7ebc3ba5a9a481, type: 2} + pitPref: {fileID: 1811317785436014, guid: d13ee2db77b3a4dcc8664d2fe2a0f219, type: 2} +--- !u!114 &114889700908650620 MonoBehaviour: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1944730196513194} + m_GameObject: {fileID: 1488387672112076} m_Enabled: 1 m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: bb172294dbbcc408286b156a2c4b553c, type: 3} + m_Script: {fileID: 11500000, guid: 282f342c2ab144bf38be65d4d0c4e07d, type: 3} m_Name: m_EditorClassIdentifier: ---- !u!114 &114981596078727782 + camera: {fileID: 20743940359151984} + sensorName: CameraSensor + width: 84 + height: 84 + grayscale: 0 +--- !u!114 &114935253044749092 MonoBehaviour: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1065140995714380} + m_GameObject: {fileID: 1488387672112076} m_Enabled: 1 m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 7d079d09ceed84ff49cf6841c66cf7ec, type: 3} + m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} m_Name: m_EditorClassIdentifier: - agent: {fileID: 0} + m_BrainParameters: + vectorObservationSize: 0 + numStackedVectorObservations: 1 + vectorActionSize: 05000000 + vectorActionDescriptions: [] + vectorActionSpaceType: 0 + m_Model: {fileID: 11400000, guid: a812f1ce7763a4a0c912717f3594fe20, type: 3} + m_InferenceDevice: 0 + m_UseHeuristic: 0 + m_BehaviorName: GridWorld diff --git a/UnitySDK/Assets/ML-Agents/Examples/Crawler/Prefabs/Crawler.prefab.meta b/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Prefabs/Area.prefab.meta similarity index 79% rename from 
UnitySDK/Assets/ML-Agents/Examples/Crawler/Prefabs/Crawler.prefab.meta rename to UnitySDK/Assets/ML-Agents/Examples/GridWorld/Prefabs/Area.prefab.meta index 7683cc731b..080b950657 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/Crawler/Prefabs/Crawler.prefab.meta +++ b/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Prefabs/Area.prefab.meta @@ -1,5 +1,5 @@ fileFormatVersion: 2 -guid: 4e6c8b33846f1431fa63f6c0fdf0d2f2 +guid: 5c2bd19e4bbda4991b74387ca5d28156 NativeFormatImporter: externalObjects: {} mainObjectFileID: 100100000 diff --git a/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Prefabs/agent.prefab b/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Prefabs/agent.prefab deleted file mode 100644 index 2fa67a7986..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Prefabs/agent.prefab +++ /dev/null @@ -1,424 +0,0 @@ -%YAML 1.1 -%TAG !u! tag:unity3d.com,2011: ---- !u!1001 &100100000 -Prefab: - m_ObjectHideFlags: 1 - serializedVersion: 2 - m_Modification: - m_TransformParent: {fileID: 0} - m_Modifications: [] - m_RemovedComponents: [] - m_ParentPrefab: {fileID: 0} - m_RootGameObject: {fileID: 1657514749044530} - m_IsPrefabParent: 1 ---- !u!1 &1124730825420414 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4908995675122566} - - component: {fileID: 33868538460775916} - - component: {fileID: 65776998495964602} - - component: {fileID: 23664094343919516} - m_Layer: 0 - m_Name: Cube - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1398059089449618 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4269400265441256} - - component: {fileID: 33021421730238064} - - component: {fileID: 65941830505251280} - - component: {fileID: 23992263755224526} - m_Layer: 0 - 
m_Name: Cube (3) - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1588535744273888 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4752617014207442} - - component: {fileID: 33332910158849260} - - component: {fileID: 65799423352771430} - - component: {fileID: 23034980085661240} - m_Layer: 0 - m_Name: Cube (2) - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1657514749044530 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4587181662868872} - - component: {fileID: 33731433020831250} - - component: {fileID: 65005393801495654} - - component: {fileID: 114143683117020968} - m_Layer: 8 - m_Name: agent - m_TagString: agent - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1918200557250438 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4198354005210282} - - component: {fileID: 33125726096858218} - - component: {fileID: 65835699142283578} - - component: {fileID: 23645573982654932} - m_Layer: 0 - m_Name: Cube (1) - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &4198354005210282 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1918200557250438} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0.4, y: 0, z: 0} - m_LocalScale: {x: 0.2, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 4587181662868872} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 
0, z: 0} ---- !u!4 &4269400265441256 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1398059089449618} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0.4} - m_LocalScale: {x: 1, y: 1, z: 0.2} - m_Children: [] - m_Father: {fileID: 4587181662868872} - m_RootOrder: 3 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4587181662868872 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1657514749044530} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 4908995675122566} - - {fileID: 4198354005210282} - - {fileID: 4752617014207442} - - {fileID: 4269400265441256} - m_Father: {fileID: 0} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4752617014207442 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1588535744273888} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: -0.4} - m_LocalScale: {x: 1, y: 1, z: 0.2} - m_Children: [] - m_Father: {fileID: 4587181662868872} - m_RootOrder: 2 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4908995675122566 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1124730825420414} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: -0.4, y: 0, z: 0} - m_LocalScale: {x: 0.2, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 4587181662868872} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!23 &23034980085661240 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1588535744273888} - 
m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 00d852aac9443402984416f9dbcd22ea, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23645573982654932 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1918200557250438} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 00d852aac9443402984416f9dbcd22ea, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23664094343919516 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1124730825420414} - m_Enabled: 1 - m_CastShadows: 0 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - 
{fileID: 2100000, guid: 00d852aac9443402984416f9dbcd22ea, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23992263755224526 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1398059089449618} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 00d852aac9443402984416f9dbcd22ea, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!33 &33021421730238064 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1398059089449618} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33125726096858218 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1918200557250438} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- 
!u!33 &33332910158849260 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1588535744273888} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33731433020831250 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1657514749044530} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33868538460775916 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1124730825420414} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!65 &65005393801495654 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1657514749044530} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65776998495964602 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1124730825420414} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 0 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65799423352771430 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1588535744273888} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 0 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65835699142283578 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1918200557250438} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 0 
- serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65941830505251280 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1398059089449618} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 0 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!114 &114143683117020968 -MonoBehaviour: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1657514749044530} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 857707f3f352541d5b858efca4479b95, type: 3} - m_Name: - m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: 2c1d51b7167874f31beda0b0cf0af468, type: 2} - agentParameters: - agentCameras: - - {fileID: 0} - maxStep: 100 - resetOnDone: 1 - onDemandDecision: 1 - numberOfActionsBetweenDecisions: 1 - timeBetweenDecisionsAtInference: 0.15 - maskActions: 1 diff --git a/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Prefabs/agent.prefab.meta b/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Prefabs/agent.prefab.meta deleted file mode 100755 index b455d1e093..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Prefabs/agent.prefab.meta +++ /dev/null @@ -1,9 +0,0 @@ -fileFormatVersion: 2 -guid: 628960e910f094ad1909ecc88cc8016d -timeCreated: 1504978468 -licenseType: Pro -NativeFormatImporter: - mainObjectFileID: 0 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Scenes/GridWorld.unity b/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Scenes/GridWorld.unity index 87b5349376..1d5e451569 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Scenes/GridWorld.unity +++ b/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Scenes/GridWorld.unity @@ -38,7 +38,7 @@ RenderSettings: m_ReflectionIntensity: 1 m_CustomReflection: {fileID: 0} m_Sun: {fileID: 0} - 
m_IndirectSpecularColor: {r: 0.4497121, g: 0.4997778, b: 0.5756369, a: 1} + m_IndirectSpecularColor: {r: 0.44971162, g: 0.49977726, b: 0.5756362, a: 1} --- !u!157 &3 LightmapSettings: m_ObjectHideFlags: 0 @@ -141,11 +141,6 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 506d4052eefa14ec9bbb356e3669043d, type: 3} m_Name: m_EditorClassIdentifier: - broadcastHub: - broadcastingBrains: - - {fileID: 11400000, guid: 2c1d51b7167874f31beda0b0cf0af468, type: 2} - m_BrainsToControl: [] - m_MaxSteps: 0 m_TrainingConfiguration: width: 84 height: 84 @@ -166,14 +161,7 @@ MonoBehaviour: value: 1 - key: numGoals value: 1 - actorObjs: [] - players: - trueAgent: {fileID: 858783320} - gridSize: 0 - camObject: {fileID: 99095112} - agentPref: {fileID: 1657514749044530, guid: 628960e910f094ad1909ecc88cc8016d, type: 2} - goalPref: {fileID: 1508142483324970, guid: 1ec4e4e96e7514d45b7ebc3ba5a9a481, type: 2} - pitPref: {fileID: 1811317785436014, guid: d13ee2db77b3a4dcc8664d2fe2a0f219, type: 2} + MainCamera: {fileID: 99095115} --- !u!4 &2047664 Transform: m_ObjectHideFlags: 0 @@ -185,7 +173,7 @@ Transform: m_LocalScale: {x: 1, y: 1, z: 1} m_Children: [] m_Father: {fileID: 0} - m_RootOrder: 6 + m_RootOrder: 5 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} --- !u!1 &99095112 GameObject: @@ -266,8 +254,215 @@ Transform: m_LocalScale: {x: 1, y: 1, z: 1} m_Children: [] m_Father: {fileID: 0} - m_RootOrder: 0 + m_RootOrder: 4 m_LocalEulerAnglesHint: {x: 45, y: 45, z: 0} +--- !u!1 &125487785 +GameObject: + m_ObjectHideFlags: 0 + m_PrefabParentObject: {fileID: 1488387672112076, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} + m_PrefabInternal: {fileID: 0} + serializedVersion: 5 + m_Component: + - component: {fileID: 125487786} + - component: {fileID: 125487789} + - component: {fileID: 125487788} + - component: {fileID: 125487790} + - component: {fileID: 125487787} + - component: {fileID: 125487791} + m_Layer: 8 + m_Name: Agent + m_TagString: agent + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + 
m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &125487786 +Transform: + m_ObjectHideFlags: 0 + m_PrefabParentObject: {fileID: 4034807106460652, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} + m_PrefabInternal: {fileID: 0} + m_GameObject: {fileID: 125487785} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 0, y: 0, z: 0} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 260425460} + - {fileID: 1777557590} + - {fileID: 1892751310} + - {fileID: 665184833} + m_Father: {fileID: 1795599558} + m_RootOrder: 1 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!114 &125487787 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_PrefabParentObject: {fileID: 114650561397225712, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} + m_PrefabInternal: {fileID: 0} + m_GameObject: {fileID: 125487785} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 857707f3f352541d5b858efca4479b95, type: 3} + m_Name: + m_EditorClassIdentifier: + agentParameters: + maxStep: 100 + resetOnDone: 1 + onDemandDecision: 1 + numberOfActionsBetweenDecisions: 1 + area: {fileID: 1795599557} + timeBetweenDecisionsAtInference: 0.15 + renderCamera: {fileID: 797520692} + maskActions: 1 +--- !u!65 &125487788 +BoxCollider: + m_ObjectHideFlags: 0 + m_PrefabParentObject: {fileID: 65073501172061214, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} + m_PrefabInternal: {fileID: 0} + m_GameObject: {fileID: 125487785} + m_Material: {fileID: 0} + m_IsTrigger: 0 + m_Enabled: 1 + serializedVersion: 2 + m_Size: {x: 1, y: 1, z: 1} + m_Center: {x: 0, y: 0, z: 0} +--- !u!33 &125487789 +MeshFilter: + m_ObjectHideFlags: 0 + m_PrefabParentObject: {fileID: 33823710649932060, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} + m_PrefabInternal: {fileID: 0} + m_GameObject: {fileID: 125487785} + m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!114 &125487790 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_PrefabParentObject: {fileID: 0} + 
m_PrefabInternal: {fileID: 0} + m_GameObject: {fileID: 125487785} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} + m_Name: + m_EditorClassIdentifier: + m_BrainParameters: + vectorObservationSize: 0 + numStackedVectorObservations: 1 + vectorActionSize: 05000000 + vectorActionDescriptions: [] + vectorActionSpaceType: 0 + m_Model: {fileID: 11400000, guid: 07afbd1d35ed345eeb850fcbb59eae0b, type: 3} + m_InferenceDevice: 0 + m_UseHeuristic: 0 + m_BehaviorName: GridWorld +--- !u!114 &125487791 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 0} + m_GameObject: {fileID: 125487785} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 132e1194facb64429b007ea1edf562d0, type: 3} + m_Name: + m_EditorClassIdentifier: + renderTexture: {fileID: 8400000, guid: 114608d5384404f89bff4b6f88432958, type: 2} + sensorName: RenderTextureSensor + width: 84 + height: 84 + grayscale: 0 +--- !u!1 &260425459 +GameObject: + m_ObjectHideFlags: 0 + m_PrefabParentObject: {fileID: 1659433968697430, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} + m_PrefabInternal: {fileID: 0} + serializedVersion: 5 + m_Component: + - component: {fileID: 260425460} + - component: {fileID: 260425463} + - component: {fileID: 260425462} + - component: {fileID: 260425461} + m_Layer: 0 + m_Name: Cube + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &260425460 +Transform: + m_ObjectHideFlags: 0 + m_PrefabParentObject: {fileID: 4269224463113992, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} + m_PrefabInternal: {fileID: 0} + m_GameObject: {fileID: 260425459} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: -0.4, y: 0, z: 0} + m_LocalScale: {x: 0.2, y: 1, z: 1} + m_Children: [] + m_Father: {fileID: 125487786} + m_RootOrder: 0 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 
&260425461 +MeshRenderer: + m_ObjectHideFlags: 0 + m_PrefabParentObject: {fileID: 23740566138369150, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} + m_PrefabInternal: {fileID: 0} + m_GameObject: {fileID: 260425459} + m_Enabled: 1 + m_CastShadows: 0 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_Materials: + - {fileID: 2100000, guid: c9fa44c2c3f8ce74ca39a3355ea42631, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_PreserveUVs: 1 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 0 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!65 &260425462 +BoxCollider: + m_ObjectHideFlags: 0 + m_PrefabParentObject: {fileID: 65922560747716658, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} + m_PrefabInternal: {fileID: 0} + m_GameObject: {fileID: 260425459} + m_Material: {fileID: 0} + m_IsTrigger: 0 + m_Enabled: 0 + serializedVersion: 2 + m_Size: {x: 1, y: 1, z: 1} + m_Center: {x: 0, y: 0, z: 0} +--- !u!33 &260425463 +MeshFilter: + m_ObjectHideFlags: 0 + m_PrefabParentObject: {fileID: 33813191588135886, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} + m_PrefabInternal: {fileID: 0} + m_GameObject: {fileID: 260425459} + m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &363761396 GameObject: m_ObjectHideFlags: 0 @@ -357,7 +552,7 @@ RectTransform: - {fileID: 918893359} - {fileID: 1305247360} m_Father: {fileID: 0} - m_RootOrder: 4 + m_RootOrder: 3 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} m_AnchorMin: {x: 0, y: 0} m_AnchorMax: {x: 0, y: 0} @@ -367,7 +562,8 @@ RectTransform: --- !u!1 &486401523 GameObject: m_ObjectHideFlags: 0 - 
m_PrefabParentObject: {fileID: 0} + m_PrefabParentObject: {fileID: 1324124466577712, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} m_PrefabInternal: {fileID: 0} serializedVersion: 5 m_Component: @@ -382,10 +578,11 @@ GameObject: --- !u!4 &486401524 Transform: m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} + m_PrefabParentObject: {fileID: 4036590373541758, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} m_PrefabInternal: {fileID: 0} m_GameObject: {fileID: 486401523} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} m_LocalPosition: {x: 0, y: 0, z: 0} m_LocalScale: {x: 1, y: 1, z: 1} m_Children: @@ -394,149 +591,274 @@ Transform: - {fileID: 959566332} - {fileID: 1938864793} - {fileID: 1726089814} - m_Father: {fileID: 0} - m_RootOrder: 2 + m_Father: {fileID: 1795599558} + m_RootOrder: 0 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &489340223 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 489340224} - - component: {fileID: 489340228} - m_Layer: 0 - m_Name: agentCam - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &489340224 -Transform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 489340223} - m_LocalRotation: {x: 0.7071068, y: -0, z: -0, w: 0.7071068} - m_LocalPosition: {x: 0, y: 5, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 0} - m_RootOrder: 5 - m_LocalEulerAnglesHint: {x: 90, y: 0, z: 0} ---- !u!20 &489340228 -Camera: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 489340223} - m_Enabled: 1 - serializedVersion: 2 - m_ClearFlags: 1 - m_BackGroundColor: {r: 0.19215687, g: 0.3019608, b: 0.4745098, a: 0} - m_NormalizedViewPortRect: - 
serializedVersion: 2 - x: 0 - y: 0 - width: 1 - height: 1 - near clip plane: 0.3 - far clip plane: 1000 - field of view: 60 - orthographic: 1 - orthographic size: 5 - m_Depth: 0 - m_CullingMask: - serializedVersion: 2 - m_Bits: 4294967295 - m_RenderingPath: -1 - m_TargetTexture: {fileID: 8400000, guid: 114608d5384404f89bff4b6f88432958, type: 2} - m_TargetDisplay: 0 - m_TargetEye: 3 - m_HDR: 0 - m_AllowMSAA: 0 - m_AllowDynamicResolution: 0 - m_ForceIntoRT: 0 - m_OcclusionCulling: 1 - m_StereoConvergence: 10 - m_StereoSeparation: 0.022 ---- !u!1001 &715789529 +--- !u!1001 &561544094 Prefab: m_ObjectHideFlags: 0 serializedVersion: 2 m_Modification: m_TransformParent: {fileID: 0} m_Modifications: - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} propertyPath: m_LocalPosition.x - value: 0 + value: -1 objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} propertyPath: m_LocalPosition.y value: 0 objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} propertyPath: m_LocalPosition.z - value: 0 + value: -1 objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} propertyPath: m_LocalRotation.x - value: 0.31598538 + value: 0 objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} propertyPath: m_LocalRotation.y - value: -0.3596048 + value: 0 objectReference: 
{fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} propertyPath: m_LocalRotation.z - value: 0.13088542 + value: 0 objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} propertyPath: m_LocalRotation.w - value: 0.8681629 + value: 1 objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} propertyPath: m_RootOrder + value: 13 + objectReference: {fileID: 0} + - target: {fileID: 1625008366184734, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_Name + value: Area (6) + objectReference: {fileID: 0} + - target: {fileID: 1625008366184734, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_IsActive value: 1 objectReference: {fileID: 0} m_RemovedComponents: [] - m_ParentPrefab: {fileID: 100100000, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} + m_ParentPrefab: {fileID: 100100000, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} m_IsPrefabParent: 0 ---- !u!1 &742849316 +--- !u!1 &665184832 GameObject: m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} + m_PrefabParentObject: {fileID: 1696233368146792, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} m_PrefabInternal: {fileID: 0} serializedVersion: 5 m_Component: - - component: {fileID: 742849319} - - component: {fileID: 742849318} - - component: {fileID: 742849317} + - component: {fileID: 665184833} + - component: {fileID: 665184836} + - component: {fileID: 665184835} + - component: {fileID: 665184834} m_Layer: 0 - m_Name: EventSystem + m_Name: Cube (3) m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- 
!u!114 &742849317 -MonoBehaviour: +--- !u!4 &665184833 +Transform: m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} + m_PrefabParentObject: {fileID: 4616835664324708, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 742849316} + m_GameObject: {fileID: 665184832} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 0, y: 0, z: 0.4} + m_LocalScale: {x: 1, y: 1, z: 0.2} + m_Children: [] + m_Father: {fileID: 125487786} + m_RootOrder: 3 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &665184834 +MeshRenderer: + m_ObjectHideFlags: 0 + m_PrefabParentObject: {fileID: 23205928308810388, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} + m_PrefabInternal: {fileID: 0} + m_GameObject: {fileID: 665184832} m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 1077351063, guid: f5f67c52d1564df4a8936ccd202a3bd8, type: 3} - m_Name: - m_EditorClassIdentifier: - m_HorizontalAxis: Horizontal - m_VerticalAxis: Vertical - m_SubmitButton: Submit - m_CancelButton: Cancel - m_InputActionsPerSecond: 10 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_Materials: + - {fileID: 2100000, guid: c9fa44c2c3f8ce74ca39a3355ea42631, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_PreserveUVs: 1 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 0 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!65 &665184835 +BoxCollider: + m_ObjectHideFlags: 0 + m_PrefabParentObject: {fileID: 65020466233620882, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} + m_PrefabInternal: {fileID: 0} + 
m_GameObject: {fileID: 665184832} + m_Material: {fileID: 0} + m_IsTrigger: 0 + m_Enabled: 0 + serializedVersion: 2 + m_Size: {x: 1, y: 1, z: 1} + m_Center: {x: 0, y: 0, z: 0} +--- !u!33 &665184836 +MeshFilter: + m_ObjectHideFlags: 0 + m_PrefabParentObject: {fileID: 33825707296015202, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} + m_PrefabInternal: {fileID: 0} + m_GameObject: {fileID: 665184832} + m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1001 &673061833 +Prefab: + m_ObjectHideFlags: 0 + serializedVersion: 2 + m_Modification: + m_TransformParent: {fileID: 0} + m_Modifications: + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalPosition.x + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalPosition.y + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalPosition.z + value: -1 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalRotation.x + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalRotation.y + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalRotation.z + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalRotation.w + value: 1 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_RootOrder + value: 7 + objectReference: {fileID: 0} + - target: {fileID: 1625008366184734, guid: 
5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_IsActive + value: 1 + objectReference: {fileID: 0} + - target: {fileID: 114650561397225712, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} + propertyPath: m_UseHeuristic + value: 0 + objectReference: {fileID: 0} + m_RemovedComponents: [] + m_ParentPrefab: {fileID: 100100000, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + m_IsPrefabParent: 0 +--- !u!1001 &715789529 +Prefab: + m_ObjectHideFlags: 0 + serializedVersion: 2 + m_Modification: + m_TransformParent: {fileID: 0} + m_Modifications: + - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} + propertyPath: m_LocalPosition.x + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} + propertyPath: m_LocalPosition.y + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} + propertyPath: m_LocalPosition.z + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} + propertyPath: m_LocalRotation.x + value: 0.31598538 + objectReference: {fileID: 0} + - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} + propertyPath: m_LocalRotation.y + value: -0.3596048 + objectReference: {fileID: 0} + - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} + propertyPath: m_LocalRotation.z + value: 0.13088542 + objectReference: {fileID: 0} + - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} + propertyPath: m_LocalRotation.w + value: 0.8681629 + objectReference: {fileID: 0} + - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} + propertyPath: m_RootOrder + value: 1 + objectReference: {fileID: 0} + m_RemovedComponents: [] + m_ParentPrefab: {fileID: 100100000, guid: 
5889392e3f05b448a8a06c5def6c2dec, type: 2} + m_IsPrefabParent: 0 +--- !u!1 &742849316 +GameObject: + m_ObjectHideFlags: 0 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 0} + serializedVersion: 5 + m_Component: + - component: {fileID: 742849319} + - component: {fileID: 742849318} + - component: {fileID: 742849317} + m_Layer: 0 + m_Name: EventSystem + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!114 &742849317 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 0} + m_GameObject: {fileID: 742849316} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 1077351063, guid: f5f67c52d1564df4a8936ccd202a3bd8, type: 3} + m_Name: + m_EditorClassIdentifier: + m_HorizontalAxis: Horizontal + m_VerticalAxis: Vertical + m_SubmitButton: Submit + m_CancelButton: Cancel + m_InputActionsPerSecond: 10 m_RepeatDelay: 0.5 m_ForceModuleActive: 0 --- !u!114 &742849318 @@ -564,13 +886,176 @@ Transform: m_LocalScale: {x: 1, y: 1, z: 1} m_Children: [] m_Father: {fileID: 0} - m_RootOrder: 3 + m_RootOrder: 2 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &858783320 stripped +--- !u!1001 &790097508 +Prefab: + m_ObjectHideFlags: 0 + serializedVersion: 2 + m_Modification: + m_TransformParent: {fileID: 0} + m_Modifications: + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalPosition.x + value: -1 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalPosition.y + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalPosition.z + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalRotation.x + value: 0 + 
objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalRotation.y + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalRotation.z + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalRotation.w + value: 1 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_RootOrder + value: 9 + objectReference: {fileID: 0} + - target: {fileID: 1625008366184734, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_Name + value: Area (2) + objectReference: {fileID: 0} + - target: {fileID: 1625008366184734, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_IsActive + value: 1 + objectReference: {fileID: 0} + m_RemovedComponents: [] + m_ParentPrefab: {fileID: 100100000, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + m_IsPrefabParent: 0 +--- !u!1 &797520691 GameObject: - m_PrefabParentObject: {fileID: 1657514749044530, guid: 628960e910f094ad1909ecc88cc8016d, + m_ObjectHideFlags: 0 + m_PrefabParentObject: {fileID: 1394424645070404, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} + m_PrefabInternal: {fileID: 0} + serializedVersion: 5 + m_Component: + - component: {fileID: 797520693} + - component: {fileID: 797520692} + m_Layer: 0 + m_Name: agentCam + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!20 &797520692 +Camera: + m_ObjectHideFlags: 0 + m_PrefabParentObject: {fileID: 20743940359151984, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} + m_PrefabInternal: {fileID: 0} + m_GameObject: {fileID: 797520691} + m_Enabled: 1 + serializedVersion: 2 + m_ClearFlags: 1 + m_BackGroundColor: {r: 0.19215687, g: 0.3019608, b: 
0.4745098, a: 0} + m_NormalizedViewPortRect: + serializedVersion: 2 + x: 0 + y: 0 + width: 1 + height: 1 + near clip plane: 0.3 + far clip plane: 1000 + field of view: 60 + orthographic: 1 + orthographic size: 5 + m_Depth: 0 + m_CullingMask: + serializedVersion: 2 + m_Bits: 4294967295 + m_RenderingPath: -1 + m_TargetTexture: {fileID: 8400000, guid: 114608d5384404f89bff4b6f88432958, type: 2} + m_TargetDisplay: 0 + m_TargetEye: 3 + m_HDR: 0 + m_AllowMSAA: 0 + m_AllowDynamicResolution: 0 + m_ForceIntoRT: 0 + m_OcclusionCulling: 1 + m_StereoConvergence: 10 + m_StereoSeparation: 0.022 +--- !u!4 &797520693 +Transform: + m_ObjectHideFlags: 0 + m_PrefabParentObject: {fileID: 4890346887087870, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} - m_PrefabInternal: {fileID: 1477921759} + m_PrefabInternal: {fileID: 0} + m_GameObject: {fileID: 797520691} + m_LocalRotation: {x: 0.7071068, y: -0, z: -0, w: 0.7071068} + m_LocalPosition: {x: 0, y: 5, z: 0} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: [] + m_Father: {fileID: 1795599558} + m_RootOrder: 2 + m_LocalEulerAnglesHint: {x: 90, y: 0, z: 0} +--- !u!1001 &885582225 +Prefab: + m_ObjectHideFlags: 0 + serializedVersion: 2 + m_Modification: + m_TransformParent: {fileID: 0} + m_Modifications: + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalPosition.x + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalPosition.y + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalPosition.z + value: 1 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalRotation.x + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + 
propertyPath: m_LocalRotation.y + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalRotation.z + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalRotation.w + value: 1 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_RootOrder + value: 12 + objectReference: {fileID: 0} + - target: {fileID: 1625008366184734, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_Name + value: Area (5) + objectReference: {fileID: 0} + - target: {fileID: 1625008366184734, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_IsActive + value: 1 + objectReference: {fileID: 0} + m_RemovedComponents: [] + m_ParentPrefab: {fileID: 100100000, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + m_IsPrefabParent: 0 --- !u!1 &918893358 GameObject: m_ObjectHideFlags: 0 @@ -692,7 +1177,7 @@ Prefab: - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, type: 2} propertyPath: m_RootOrder - value: 8 + value: 0 objectReference: {fileID: 0} - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, type: 2} @@ -747,10 +1232,61 @@ Prefab: m_RemovedComponents: [] m_ParentPrefab: {fileID: 100100000, guid: 3ce107b4a79bc4eef83afde434932a68, type: 2} m_IsPrefabParent: 0 +--- !u!1001 &949789229 +Prefab: + m_ObjectHideFlags: 0 + serializedVersion: 2 + m_Modification: + m_TransformParent: {fileID: 0} + m_Modifications: + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalPosition.x + value: -1 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalPosition.y + value: 0 + objectReference: {fileID: 0} + - target: 
{fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalPosition.z + value: 1 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalRotation.x + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalRotation.y + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalRotation.z + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalRotation.w + value: 1 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_RootOrder + value: 11 + objectReference: {fileID: 0} + - target: {fileID: 1625008366184734, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_Name + value: Area (4) + objectReference: {fileID: 0} + - target: {fileID: 1625008366184734, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_IsActive + value: 1 + objectReference: {fileID: 0} + m_RemovedComponents: [] + m_ParentPrefab: {fileID: 100100000, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + m_IsPrefabParent: 0 --- !u!1 &959566328 GameObject: m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} + m_PrefabParentObject: {fileID: 1656910849934022, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} m_PrefabInternal: {fileID: 0} serializedVersion: 5 m_Component: @@ -768,7 +1304,8 @@ GameObject: --- !u!23 &959566329 MeshRenderer: m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} + m_PrefabParentObject: {fileID: 23289473826438240, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} m_PrefabInternal: {fileID: 0} m_GameObject: {fileID: 959566328} m_Enabled: 1 @@ -802,7 +1339,8 @@ 
MeshRenderer: --- !u!65 &959566330 BoxCollider: m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} + m_PrefabParentObject: {fileID: 65461269218509740, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} m_PrefabInternal: {fileID: 0} m_GameObject: {fileID: 959566328} m_Material: {fileID: 0} @@ -814,14 +1352,16 @@ BoxCollider: --- !u!33 &959566331 MeshFilter: m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} + m_PrefabParentObject: {fileID: 33099526047820694, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} m_PrefabInternal: {fileID: 0} m_GameObject: {fileID: 959566328} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} --- !u!4 &959566332 Transform: m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} + m_PrefabParentObject: {fileID: 4399229758781002, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} m_PrefabInternal: {fileID: 0} m_GameObject: {fileID: 959566328} m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} @@ -834,7 +1374,8 @@ Transform: --- !u!1 &1045409640 GameObject: m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} + m_PrefabParentObject: {fileID: 1817050562469182, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} m_PrefabInternal: {fileID: 0} serializedVersion: 5 m_Component: @@ -852,7 +1393,8 @@ GameObject: --- !u!23 &1045409641 MeshRenderer: m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} + m_PrefabParentObject: {fileID: 23048682015641784, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} m_PrefabInternal: {fileID: 0} m_GameObject: {fileID: 1045409640} m_Enabled: 1 @@ -886,7 +1428,8 @@ MeshRenderer: --- !u!65 &1045409642 BoxCollider: m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} + m_PrefabParentObject: {fileID: 65782631683949718, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} m_PrefabInternal: {fileID: 0} m_GameObject: {fileID: 1045409640} m_Material: {fileID: 0} @@ -898,14 +1441,16 @@ BoxCollider: --- !u!33 &1045409643 MeshFilter: m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 
0} + m_PrefabParentObject: {fileID: 33550006272110778, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} m_PrefabInternal: {fileID: 0} m_GameObject: {fileID: 1045409640} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} --- !u!4 &1045409644 Transform: m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} + m_PrefabParentObject: {fileID: 4088684435237278, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} m_PrefabInternal: {fileID: 0} m_GameObject: {fileID: 1045409640} m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} @@ -918,7 +1463,8 @@ Transform: --- !u!1 &1208586857 GameObject: m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} + m_PrefabParentObject: {fileID: 1881546218232006, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} m_PrefabInternal: {fileID: 0} serializedVersion: 5 m_Component: @@ -936,7 +1482,8 @@ GameObject: --- !u!23 &1208586858 MeshRenderer: m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} + m_PrefabParentObject: {fileID: 23096611355272904, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} m_PrefabInternal: {fileID: 0} m_GameObject: {fileID: 1208586857} m_Enabled: 1 @@ -970,7 +1517,8 @@ MeshRenderer: --- !u!64 &1208586859 MeshCollider: m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} + m_PrefabParentObject: {fileID: 64291102267821286, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} m_PrefabInternal: {fileID: 0} m_GameObject: {fileID: 1208586857} m_Material: {fileID: 0} @@ -984,14 +1532,16 @@ MeshCollider: --- !u!33 &1208586860 MeshFilter: m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} + m_PrefabParentObject: {fileID: 33934167732208046, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} m_PrefabInternal: {fileID: 0} m_GameObject: {fileID: 1208586857} m_Mesh: {fileID: 10209, guid: 0000000000000000e000000000000000, type: 0} --- !u!4 &1208586861 Transform: m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} + m_PrefabParentObject: {fileID: 4558725385767926, guid: 
5c2bd19e4bbda4991b74387ca5d28156, + type: 2} m_PrefabInternal: {fileID: 0} m_GameObject: {fileID: 1208586857} m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} @@ -1068,91 +1618,55 @@ CanvasRenderer: m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 0} m_GameObject: {fileID: 1305247359} ---- !u!1001 &1477921759 +--- !u!1001 &1305594059 Prefab: m_ObjectHideFlags: 0 serializedVersion: 2 m_Modification: m_TransformParent: {fileID: 0} m_Modifications: - - target: {fileID: 114143683117020968, guid: 628960e910f094ad1909ecc88cc8016d, - type: 2} - propertyPath: agentParameters.agentCameras.Array.size - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 114143683117020968, guid: 628960e910f094ad1909ecc88cc8016d, - type: 2} - propertyPath: agentParameters.agentRenderTextures.Array.size - value: 1 - objectReference: {fileID: 0} - - target: {fileID: 4587181662868872, guid: 628960e910f094ad1909ecc88cc8016d, type: 2} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} propertyPath: m_LocalPosition.x - value: 0 + value: 1 objectReference: {fileID: 0} - - target: {fileID: 4587181662868872, guid: 628960e910f094ad1909ecc88cc8016d, type: 2} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} propertyPath: m_LocalPosition.y value: 0 objectReference: {fileID: 0} - - target: {fileID: 4587181662868872, guid: 628960e910f094ad1909ecc88cc8016d, type: 2} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} propertyPath: m_LocalPosition.z - value: 0 + value: 1 objectReference: {fileID: 0} - - target: {fileID: 4587181662868872, guid: 628960e910f094ad1909ecc88cc8016d, type: 2} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} propertyPath: m_LocalRotation.x value: 0 objectReference: {fileID: 0} - - target: {fileID: 4587181662868872, guid: 628960e910f094ad1909ecc88cc8016d, type: 2} + - target: {fileID: 4124767863011510, guid: 
5c2bd19e4bbda4991b74387ca5d28156, type: 2} propertyPath: m_LocalRotation.y value: 0 objectReference: {fileID: 0} - - target: {fileID: 4587181662868872, guid: 628960e910f094ad1909ecc88cc8016d, type: 2} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} propertyPath: m_LocalRotation.z value: 0 objectReference: {fileID: 0} - - target: {fileID: 4587181662868872, guid: 628960e910f094ad1909ecc88cc8016d, type: 2} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} propertyPath: m_LocalRotation.w value: 1 objectReference: {fileID: 0} - - target: {fileID: 4587181662868872, guid: 628960e910f094ad1909ecc88cc8016d, type: 2} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} propertyPath: m_RootOrder - value: 7 + value: 10 objectReference: {fileID: 0} - - target: {fileID: 1657514749044530, guid: 628960e910f094ad1909ecc88cc8016d, type: 2} + - target: {fileID: 1625008366184734, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} propertyPath: m_Name - value: trueAgent + value: Area (3) + objectReference: {fileID: 0} + - target: {fileID: 1625008366184734, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_IsActive + value: 1 objectReference: {fileID: 0} - - target: {fileID: 114143683117020968, guid: 628960e910f094ad1909ecc88cc8016d, - type: 2} - propertyPath: agentParameters.agentRenderTextures.Array.data[0] - value: - objectReference: {fileID: 8400000, guid: 114608d5384404f89bff4b6f88432958, type: 2} - - target: {fileID: 114143683117020968, guid: 628960e910f094ad1909ecc88cc8016d, - type: 2} - propertyPath: renderCamera - value: - objectReference: {fileID: 489340228} - - target: {fileID: 23664094343919516, guid: 628960e910f094ad1909ecc88cc8016d, - type: 2} - propertyPath: m_Materials.Array.data[0] - value: - objectReference: {fileID: 2100000, guid: c9fa44c2c3f8ce74ca39a3355ea42631, type: 2} - - target: {fileID: 23992263755224526, guid: 
628960e910f094ad1909ecc88cc8016d, - type: 2} - propertyPath: m_Materials.Array.data[0] - value: - objectReference: {fileID: 2100000, guid: c9fa44c2c3f8ce74ca39a3355ea42631, type: 2} - - target: {fileID: 23645573982654932, guid: 628960e910f094ad1909ecc88cc8016d, - type: 2} - propertyPath: m_Materials.Array.data[0] - value: - objectReference: {fileID: 2100000, guid: c9fa44c2c3f8ce74ca39a3355ea42631, type: 2} - - target: {fileID: 23034980085661240, guid: 628960e910f094ad1909ecc88cc8016d, - type: 2} - propertyPath: m_Materials.Array.data[0] - value: - objectReference: {fileID: 2100000, guid: c9fa44c2c3f8ce74ca39a3355ea42631, type: 2} m_RemovedComponents: [] - m_ParentPrefab: {fileID: 100100000, guid: 628960e910f094ad1909ecc88cc8016d, type: 2} + m_ParentPrefab: {fileID: 100100000, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} m_IsPrefabParent: 0 --- !u!1 &1553342942 GameObject: @@ -1228,10 +1742,61 @@ CanvasRenderer: m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 0} m_GameObject: {fileID: 1553342942} +--- !u!1001 &1558187638 +Prefab: + m_ObjectHideFlags: 0 + serializedVersion: 2 + m_Modification: + m_TransformParent: {fileID: 0} + m_Modifications: + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalPosition.x + value: 1 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalPosition.y + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalPosition.z + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalRotation.x + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalRotation.y + value: 0 + objectReference: {fileID: 0} + 
- target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalRotation.z + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalRotation.w + value: 1 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_RootOrder + value: 8 + objectReference: {fileID: 0} + - target: {fileID: 1625008366184734, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_Name + value: Area (1) + objectReference: {fileID: 0} + - target: {fileID: 1625008366184734, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_IsActive + value: 1 + objectReference: {fileID: 0} + m_RemovedComponents: [] + m_ParentPrefab: {fileID: 100100000, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + m_IsPrefabParent: 0 --- !u!1 &1726089810 GameObject: m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} + m_PrefabParentObject: {fileID: 1220141488340396, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} m_PrefabInternal: {fileID: 0} serializedVersion: 5 m_Component: @@ -1249,7 +1814,8 @@ GameObject: --- !u!23 &1726089811 MeshRenderer: m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} + m_PrefabParentObject: {fileID: 23631786362770220, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} m_PrefabInternal: {fileID: 0} m_GameObject: {fileID: 1726089810} m_Enabled: 1 @@ -1283,7 +1849,8 @@ MeshRenderer: --- !u!65 &1726089812 BoxCollider: m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} + m_PrefabParentObject: {fileID: 65623874337418966, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} m_PrefabInternal: {fileID: 0} m_GameObject: {fileID: 1726089810} m_Material: {fileID: 0} @@ -1295,14 +1862,16 @@ BoxCollider: --- !u!33 &1726089813 MeshFilter: m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} + m_PrefabParentObject: {fileID: 33222498917940530, 
guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} m_PrefabInternal: {fileID: 0} m_GameObject: {fileID: 1726089810} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} --- !u!4 &1726089814 Transform: m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} + m_PrefabParentObject: {fileID: 4007504045862718, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} m_PrefabInternal: {fileID: 0} m_GameObject: {fileID: 1726089810} m_LocalRotation: {x: -0, y: 0.7071068, z: -0, w: 0.7071068} @@ -1312,10 +1881,240 @@ Transform: m_Father: {fileID: 486401524} m_RootOrder: 4 m_LocalEulerAnglesHint: {x: 0, y: 90, z: 0} +--- !u!1 &1777557589 +GameObject: + m_ObjectHideFlags: 0 + m_PrefabParentObject: {fileID: 1376404096451388, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} + m_PrefabInternal: {fileID: 0} + serializedVersion: 5 + m_Component: + - component: {fileID: 1777557590} + - component: {fileID: 1777557593} + - component: {fileID: 1777557592} + - component: {fileID: 1777557591} + m_Layer: 0 + m_Name: Cube (1) + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1777557590 +Transform: + m_ObjectHideFlags: 0 + m_PrefabParentObject: {fileID: 4206155882393090, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} + m_PrefabInternal: {fileID: 0} + m_GameObject: {fileID: 1777557589} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 0.4, y: 0, z: 0} + m_LocalScale: {x: 0.2, y: 1, z: 1} + m_Children: [] + m_Father: {fileID: 125487786} + m_RootOrder: 1 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &1777557591 +MeshRenderer: + m_ObjectHideFlags: 0 + m_PrefabParentObject: {fileID: 23027717151182946, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} + m_PrefabInternal: {fileID: 0} + m_GameObject: {fileID: 1777557589} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + 
m_Materials: + - {fileID: 2100000, guid: c9fa44c2c3f8ce74ca39a3355ea42631, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_PreserveUVs: 1 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 0 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!65 &1777557592 +BoxCollider: + m_ObjectHideFlags: 0 + m_PrefabParentObject: {fileID: 65829039252076494, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} + m_PrefabInternal: {fileID: 0} + m_GameObject: {fileID: 1777557589} + m_Material: {fileID: 0} + m_IsTrigger: 0 + m_Enabled: 0 + serializedVersion: 2 + m_Size: {x: 1, y: 1, z: 1} + m_Center: {x: 0, y: 0, z: 0} +--- !u!33 &1777557593 +MeshFilter: + m_ObjectHideFlags: 0 + m_PrefabParentObject: {fileID: 33152597024021804, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} + m_PrefabInternal: {fileID: 0} + m_GameObject: {fileID: 1777557589} + m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} +--- !u!1 &1795599556 +GameObject: + m_ObjectHideFlags: 0 + m_PrefabParentObject: {fileID: 1625008366184734, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} + m_PrefabInternal: {fileID: 0} + serializedVersion: 5 + m_Component: + - component: {fileID: 1795599558} + - component: {fileID: 1795599557} + m_Layer: 0 + m_Name: AreaRenderTexture + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!114 &1795599557 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_PrefabParentObject: {fileID: 114704252266302846, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} + m_PrefabInternal: {fileID: 0} + m_GameObject: {fileID: 1795599556} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 
11500000, guid: 676658555cb2d4884aa8285062aab2a1, type: 3} + m_Name: + m_EditorClassIdentifier: + actorObjs: [] + players: + trueAgent: {fileID: 125487785} + goalPref: {fileID: 1508142483324970, guid: 1ec4e4e96e7514d45b7ebc3ba5a9a481, type: 2} + pitPref: {fileID: 1811317785436014, guid: d13ee2db77b3a4dcc8664d2fe2a0f219, type: 2} +--- !u!4 &1795599558 +Transform: + m_ObjectHideFlags: 0 + m_PrefabParentObject: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} + m_PrefabInternal: {fileID: 0} + m_GameObject: {fileID: 1795599556} + m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} + m_LocalPosition: {x: 0, y: 0, z: 0} + m_LocalScale: {x: 1, y: 1, z: 1} + m_Children: + - {fileID: 486401524} + - {fileID: 125487786} + - {fileID: 797520693} + m_Father: {fileID: 0} + m_RootOrder: 6 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!1 &1892751309 +GameObject: + m_ObjectHideFlags: 0 + m_PrefabParentObject: {fileID: 1805019891541352, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} + m_PrefabInternal: {fileID: 0} + serializedVersion: 5 + m_Component: + - component: {fileID: 1892751310} + - component: {fileID: 1892751313} + - component: {fileID: 1892751312} + - component: {fileID: 1892751311} + m_Layer: 0 + m_Name: Cube (2) + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!4 &1892751310 +Transform: + m_ObjectHideFlags: 0 + m_PrefabParentObject: {fileID: 4041386967126538, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} + m_PrefabInternal: {fileID: 0} + m_GameObject: {fileID: 1892751309} + m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} + m_LocalPosition: {x: 0, y: 0, z: -0.4} + m_LocalScale: {x: 1, y: 1, z: 0.2} + m_Children: [] + m_Father: {fileID: 125487786} + m_RootOrder: 2 + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} +--- !u!23 &1892751311 +MeshRenderer: + m_ObjectHideFlags: 0 + m_PrefabParentObject: {fileID: 23200653194041678, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} + 
m_PrefabInternal: {fileID: 0} + m_GameObject: {fileID: 1892751309} + m_Enabled: 1 + m_CastShadows: 1 + m_ReceiveShadows: 1 + m_DynamicOccludee: 1 + m_MotionVectors: 1 + m_LightProbeUsage: 1 + m_ReflectionProbeUsage: 1 + m_Materials: + - {fileID: 2100000, guid: c9fa44c2c3f8ce74ca39a3355ea42631, type: 2} + m_StaticBatchInfo: + firstSubMesh: 0 + subMeshCount: 0 + m_StaticBatchRoot: {fileID: 0} + m_ProbeAnchor: {fileID: 0} + m_LightProbeVolumeOverride: {fileID: 0} + m_ScaleInLightmap: 1 + m_PreserveUVs: 1 + m_IgnoreNormalsForChartDetection: 0 + m_ImportantGI: 0 + m_StitchLightmapSeams: 0 + m_SelectedEditorRenderState: 3 + m_MinimumChartSize: 4 + m_AutoUVMaxDistance: 0.5 + m_AutoUVMaxAngle: 89 + m_LightmapParameters: {fileID: 0} + m_SortingLayerID: 0 + m_SortingLayer: 0 + m_SortingOrder: 0 +--- !u!65 &1892751312 +BoxCollider: + m_ObjectHideFlags: 0 + m_PrefabParentObject: {fileID: 65321351298141546, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} + m_PrefabInternal: {fileID: 0} + m_GameObject: {fileID: 1892751309} + m_Material: {fileID: 0} + m_IsTrigger: 0 + m_Enabled: 0 + serializedVersion: 2 + m_Size: {x: 1, y: 1, z: 1} + m_Center: {x: 0, y: 0, z: 0} +--- !u!33 &1892751313 +MeshFilter: + m_ObjectHideFlags: 0 + m_PrefabParentObject: {fileID: 33172111281575160, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} + m_PrefabInternal: {fileID: 0} + m_GameObject: {fileID: 1892751309} + m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} --- !u!1 &1938864789 GameObject: m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} + m_PrefabParentObject: {fileID: 1898983423426052, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} m_PrefabInternal: {fileID: 0} serializedVersion: 5 m_Component: @@ -1333,7 +2132,8 @@ GameObject: --- !u!23 &1938864790 MeshRenderer: m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} + m_PrefabParentObject: {fileID: 23171092457376468, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} m_PrefabInternal: {fileID: 0} 
m_GameObject: {fileID: 1938864789} m_Enabled: 1 @@ -1367,7 +2167,8 @@ MeshRenderer: --- !u!65 &1938864791 BoxCollider: m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} + m_PrefabParentObject: {fileID: 65944324207520424, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} m_PrefabInternal: {fileID: 0} m_GameObject: {fileID: 1938864789} m_Material: {fileID: 0} @@ -1379,14 +2180,16 @@ BoxCollider: --- !u!33 &1938864792 MeshFilter: m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} + m_PrefabParentObject: {fileID: 33572314435256338, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} m_PrefabInternal: {fileID: 0} m_GameObject: {fileID: 1938864789} m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} --- !u!4 &1938864793 Transform: m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} + m_PrefabParentObject: {fileID: 4479182187388718, guid: 5c2bd19e4bbda4991b74387ca5d28156, + type: 2} m_PrefabInternal: {fileID: 0} m_GameObject: {fileID: 1938864789} m_LocalRotation: {x: -0, y: 0.7071068, z: -0, w: 0.7071068} @@ -1396,3 +2199,53 @@ Transform: m_Father: {fileID: 486401524} m_RootOrder: 3 m_LocalEulerAnglesHint: {x: 0, y: 90, z: 0} +--- !u!1001 &2140226864 +Prefab: + m_ObjectHideFlags: 0 + serializedVersion: 2 + m_Modification: + m_TransformParent: {fileID: 0} + m_Modifications: + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalPosition.x + value: 1 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalPosition.y + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalPosition.z + value: -1 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalRotation.x + value: 0 + objectReference: {fileID: 0} + - target: 
{fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalRotation.y + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalRotation.z + value: 0 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_LocalRotation.w + value: 1 + objectReference: {fileID: 0} + - target: {fileID: 4124767863011510, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_RootOrder + value: 14 + objectReference: {fileID: 0} + - target: {fileID: 1625008366184734, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_Name + value: Area (7) + objectReference: {fileID: 0} + - target: {fileID: 1625008366184734, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + propertyPath: m_IsActive + value: 1 + objectReference: {fileID: 0} + m_RemovedComponents: [] + m_ParentPrefab: {fileID: 100100000, guid: 5c2bd19e4bbda4991b74387ca5d28156, type: 2} + m_IsPrefabParent: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Scripts/GridAcademy.cs b/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Scripts/GridAcademy.cs index 74e5948c5b..ee236da61e 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Scripts/GridAcademy.cs +++ b/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Scripts/GridAcademy.cs @@ -1,120 +1,15 @@ -using System.Collections.Generic; using UnityEngine; -using System.Linq; using MLAgents; - public class GridAcademy : Academy { - [HideInInspector] - public List actorObjs; - [HideInInspector] - public int[] players; - - public GameObject trueAgent; - - public int gridSize; - - public GameObject camObject; - Camera m_Cam; - Camera m_AgentCam; - - public GameObject agentPref; - public GameObject goalPref; - public GameObject pitPref; - GameObject[] m_Objects; - - GameObject m_Plane; - GameObject m_Sn; - GameObject m_Ss; - GameObject m_Se; - 
GameObject m_Sw; - - public override void InitializeAcademy() - { - gridSize = (int)resetParameters["gridSize"]; - m_Cam = camObject.GetComponent(); + public Camera MainCamera; - m_Objects = new[] {agentPref, goalPref, pitPref}; - - m_AgentCam = GameObject.Find("agentCam").GetComponent(); - - actorObjs = new List(); - - m_Plane = GameObject.Find("Plane"); - m_Sn = GameObject.Find("sN"); - m_Ss = GameObject.Find("sS"); - m_Sw = GameObject.Find("sW"); - m_Se = GameObject.Find("sE"); - } - - public void SetEnvironment() + public override void AcademyReset() { - m_Cam.transform.position = new Vector3(-((int)resetParameters["gridSize"] - 1) / 2f, + MainCamera.transform.position = new Vector3(-((int)resetParameters["gridSize"] - 1) / 2f, (int)resetParameters["gridSize"] * 1.25f, -((int)resetParameters["gridSize"] - 1) / 2f); - m_Cam.orthographicSize = ((int)resetParameters["gridSize"] + 5f) / 2f; - - var playersList = new List(); - - for (var i = 0; i < (int)resetParameters["numObstacles"]; i++) - { - playersList.Add(2); - } - - for (var i = 0; i < (int)resetParameters["numGoals"]; i++) - { - playersList.Add(1); - } - players = playersList.ToArray(); - - m_Plane.transform.localScale = new Vector3(gridSize / 10.0f, 1f, gridSize / 10.0f); - m_Plane.transform.position = new Vector3((gridSize - 1) / 2f, -0.5f, (gridSize - 1) / 2f); - m_Sn.transform.localScale = new Vector3(1, 1, gridSize + 2); - m_Ss.transform.localScale = new Vector3(1, 1, gridSize + 2); - m_Sn.transform.position = new Vector3((gridSize - 1) / 2f, 0.0f, gridSize); - m_Ss.transform.position = new Vector3((gridSize - 1) / 2f, 0.0f, -1); - m_Se.transform.localScale = new Vector3(1, 1, gridSize + 2); - m_Sw.transform.localScale = new Vector3(1, 1, gridSize + 2); - m_Se.transform.position = new Vector3(gridSize, 0.0f, (gridSize - 1) / 2f); - m_Sw.transform.position = new Vector3(-1, 0.0f, (gridSize - 1) / 2f); - - m_AgentCam.orthographicSize = (gridSize) / 2f; - m_AgentCam.transform.position = new 
Vector3((gridSize - 1) / 2f, gridSize + 1f, (gridSize - 1) / 2f); - } - - public override void AcademyReset() - { - foreach (var actor in actorObjs) - { - DestroyImmediate(actor); - } - SetEnvironment(); - - actorObjs.Clear(); - - var numbers = new HashSet(); - while (numbers.Count < players.Length + 1) - { - numbers.Add(Random.Range(0, gridSize * gridSize)); - } - var numbersA = Enumerable.ToArray(numbers); - - for (var i = 0; i < players.Length; i++) - { - var x = (numbersA[i]) / gridSize; - var y = (numbersA[i]) % gridSize; - var actorObj = Instantiate(m_Objects[players[i]]); - actorObj.transform.position = new Vector3(x, -0.25f, y); - actorObjs.Add(actorObj); - } - - var xA = (numbersA[players.Length]) / gridSize; - var yA = (numbersA[players.Length]) % gridSize; - trueAgent.transform.position = new Vector3(xA, -0.25f, yA); - } - - public override void AcademyStep() - { + MainCamera.orthographicSize = ((int)resetParameters["gridSize"] + 5f) / 2f; } } diff --git a/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Scripts/GridAgent.cs b/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Scripts/GridAgent.cs index 907f67ec65..c142413d2d 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Scripts/GridAgent.cs +++ b/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Scripts/GridAgent.cs @@ -2,11 +2,14 @@ using UnityEngine; using System.Linq; using MLAgents; +using UnityEngine.Serialization; public class GridAgent : Agent { + private Academy m_Academy; + [FormerlySerializedAs("m_Area")] [Header("Specific to GridWorld")] - private GridAcademy m_Academy; + public GridArea area; public float timeBetweenDecisionsAtInference; private float m_TimeSinceDecision; @@ -27,7 +30,7 @@ public class GridAgent : Agent public override void InitializeAgent() { - m_Academy = FindObjectOfType(typeof(GridAcademy)) as GridAcademy; + m_Academy = FindObjectOfType(); } public override void CollectObservations() @@ -50,7 +53,7 @@ private void SetMask() // Prevents the agent from picking an action that 
would make it collide with a wall var positionX = (int)transform.position.x; var positionZ = (int)transform.position.z; - var maxPosition = m_Academy.gridSize - 1; + var maxPosition = (int)m_Academy.resetParameters["gridSize"] - 1; if (positionX == 0) { @@ -120,10 +123,31 @@ public override void AgentAction(float[] vectorAction, string textAction) } } + public override float[] Heuristic() + { + if (Input.GetKey(KeyCode.D)) + { + return new float[] { k_Right }; + } + if (Input.GetKey(KeyCode.W)) + { + return new float[] { k_Up }; + } + if (Input.GetKey(KeyCode.A)) + { + return new float[] { k_Left }; + } + if (Input.GetKey(KeyCode.S)) + { + return new float[] { k_Down }; + } + return new float[] { k_NoAction }; + } + // to be implemented by the developer public override void AgentReset() { - m_Academy.AcademyReset(); + area.AreaReset(); } public void FixedUpdate() diff --git a/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Scripts/GridArea.cs b/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Scripts/GridArea.cs new file mode 100644 index 0000000000..4c19f1ba3d --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Scripts/GridArea.cs @@ -0,0 +1,115 @@ +using System.Collections.Generic; +using UnityEngine; +using System.Linq; +using MLAgents; + + +public class GridArea : MonoBehaviour +{ + [HideInInspector] + public List actorObjs; + [HideInInspector] + public int[] players; + + public GameObject trueAgent; + + private ResetParameters m_ResetParameters; + + Camera m_AgentCam; + + public GameObject goalPref; + public GameObject pitPref; + GameObject[] m_Objects; + + GameObject m_Plane; + GameObject m_Sn; + GameObject m_Ss; + GameObject m_Se; + GameObject m_Sw; + + private Vector3 m_InitialPosition; + + public void Awake() + { + m_ResetParameters = FindObjectOfType().resetParameters; + + m_Objects = new[] { goalPref, pitPref }; + + m_AgentCam = transform.Find("agentCam").GetComponent(); + + actorObjs = new List(); + + var sceneTransform = transform.Find("scene"); 
+ + m_Plane = sceneTransform.Find("Plane").gameObject; + m_Sn = sceneTransform.Find("sN").gameObject; + m_Ss = sceneTransform.Find("sS").gameObject; + m_Sw = sceneTransform.Find("sW").gameObject; + m_Se = sceneTransform.Find("sE").gameObject; + m_InitialPosition = transform.position; + } + + public void SetEnvironment() + { + transform.position = m_InitialPosition * (m_ResetParameters["gridSize"] + 1); + var playersList = new List(); + + for (var i = 0; i < (int)m_ResetParameters["numObstacles"]; i++) + { + playersList.Add(1); + } + + for (var i = 0; i < (int)m_ResetParameters["numGoals"]; i++) + { + playersList.Add(0); + } + players = playersList.ToArray(); + + var gridSize = (int)m_ResetParameters["gridSize"]; + m_Plane.transform.localScale = new Vector3(gridSize / 10.0f, 1f, gridSize / 10.0f); + m_Plane.transform.localPosition = new Vector3((gridSize - 1) / 2f, -0.5f, (gridSize - 1) / 2f); + m_Sn.transform.localScale = new Vector3(1, 1, gridSize + 2); + m_Ss.transform.localScale = new Vector3(1, 1, gridSize + 2); + m_Sn.transform.localPosition = new Vector3((gridSize - 1) / 2f, 0.0f, gridSize); + m_Ss.transform.localPosition = new Vector3((gridSize - 1) / 2f, 0.0f, -1); + m_Se.transform.localScale = new Vector3(1, 1, gridSize + 2); + m_Sw.transform.localScale = new Vector3(1, 1, gridSize + 2); + m_Se.transform.localPosition = new Vector3(gridSize, 0.0f, (gridSize - 1) / 2f); + m_Sw.transform.localPosition = new Vector3(-1, 0.0f, (gridSize - 1) / 2f); + + m_AgentCam.orthographicSize = (gridSize) / 2f; + m_AgentCam.transform.localPosition = new Vector3((gridSize - 1) / 2f, gridSize + 1f, (gridSize - 1) / 2f); + } + + public void AreaReset() + { + var gridSize = (int)m_ResetParameters["gridSize"]; + foreach (var actor in actorObjs) + { + DestroyImmediate(actor); + } + SetEnvironment(); + + actorObjs.Clear(); + + var numbers = new HashSet(); + while (numbers.Count < players.Length + 1) + { + numbers.Add(Random.Range(0, gridSize * gridSize)); + } + var numbersA = 
Enumerable.ToArray(numbers); + + for (var i = 0; i < players.Length; i++) + { + var x = (numbersA[i]) / gridSize; + var y = (numbersA[i]) % gridSize; + var actorObj = Instantiate(m_Objects[players[i]], transform); + actorObj.transform.localPosition = new Vector3(x, -0.25f, y); + actorObjs.Add(actorObj); + } + + var xA = (numbersA[players.Length]) / gridSize; + var yA = (numbersA[players.Length]) % gridSize; + trueAgent.transform.localPosition = new Vector3(xA, -0.25f, yA); + } +} diff --git a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/AgentActionProto.cs.meta b/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Scripts/GridArea.cs.meta similarity index 83% rename from UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/AgentActionProto.cs.meta rename to UnitySDK/Assets/ML-Agents/Examples/GridWorld/Scripts/GridArea.cs.meta index 3f09aabf4f..cfac9e9f5b 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/AgentActionProto.cs.meta +++ b/UnitySDK/Assets/ML-Agents/Examples/GridWorld/Scripts/GridArea.cs.meta @@ -1,5 +1,5 @@ fileFormatVersion: 2 -guid: 4482f127d4a874cf8a11da2b2cc27dc9 +guid: 676658555cb2d4884aa8285062aab2a1 MonoImporter: externalObjects: {} serializedVersion: 2 diff --git a/UnitySDK/Assets/ML-Agents/Examples/GridWorld/TFModels/GridWorld.nn b/UnitySDK/Assets/ML-Agents/Examples/GridWorld/TFModels/GridWorld.nn new file mode 100644 index 0000000000..7a438f090e Binary files /dev/null and b/UnitySDK/Assets/ML-Agents/Examples/GridWorld/TFModels/GridWorld.nn differ diff --git a/UnitySDK/Assets/ML-Agents/Examples/GridWorld/TFModels/GridWorld.nn.meta b/UnitySDK/Assets/ML-Agents/Examples/GridWorld/TFModels/GridWorld.nn.meta new file mode 100644 index 0000000000..c0ccd2699b --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Examples/GridWorld/TFModels/GridWorld.nn.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: a812f1ce7763a4a0c912717f3594fe20 +ScriptedImporter: + userData: + assetBundleName: + assetBundleVariant: + script: {fileID: 11500000, guid: 
19ed1486aa27d4903b34839f37b8f69f, type: 3} diff --git a/UnitySDK/Assets/ML-Agents/Examples/GridWorld/TFModels/GridWorldLearning.nn b/UnitySDK/Assets/ML-Agents/Examples/GridWorld/TFModels/GridWorldLearning.nn deleted file mode 100644 index a397d869db..0000000000 Binary files a/UnitySDK/Assets/ML-Agents/Examples/GridWorld/TFModels/GridWorldLearning.nn and /dev/null differ diff --git a/UnitySDK/Assets/ML-Agents/Examples/GridWorld/TFModels/GridWorldLearning.nn.meta b/UnitySDK/Assets/ML-Agents/Examples/GridWorld/TFModels/GridWorldLearning.nn.meta deleted file mode 100644 index 829233c6b3..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/GridWorld/TFModels/GridWorldLearning.nn.meta +++ /dev/null @@ -1,7 +0,0 @@ -fileFormatVersion: 2 -guid: 07afbd1d35ed345eeb850fcbb59eae0b -ScriptedImporter: - userData: - assetBundleName: - assetBundleVariant: - script: {fileID: 11500000, guid: 19ed1486aa27d4903b34839f37b8f69f, type: 3} diff --git a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Brains.meta b/UnitySDK/Assets/ML-Agents/Examples/Hallway/Brains.meta deleted file mode 100644 index 7c230b1e35..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Brains.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: b27eedc1c0f4e4056b413a012ea81974 -folderAsset: yes -DefaultImporter: - externalObjects: {} - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Brains/HallwayLearning.asset b/UnitySDK/Assets/ML-Agents/Examples/Hallway/Brains/HallwayLearning.asset deleted file mode 100644 index 6487bbfc2e..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Brains/HallwayLearning.asset +++ /dev/null @@ -1,23 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 8b23992c8eb17439887f5e944bf04a40, type: 3} - m_Name: HallwayLearning - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 36 - numStackedVectorObservations: 3 - vectorActionSize: 05000000 - cameraResolutions: [] - vectorActionDescriptions: - - - vectorActionSpaceType: 0 - model: {fileID: 11400000, guid: b3f3b601fa5e84185862261041525ea9, type: 3} - inferenceDevice: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Brains/HallwayLearning.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/Hallway/Brains/HallwayLearning.asset.meta deleted file mode 100644 index 5f8bc2ee5b..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Brains/HallwayLearning.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 533f2edd327794ca996d0320901b501c -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Brains/HallwayPlayer.asset b/UnitySDK/Assets/ML-Agents/Examples/Hallway/Brains/HallwayPlayer.asset deleted file mode 100644 index 8fd30d5583..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Brains/HallwayPlayer.asset +++ /dev/null @@ -1,36 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 41e9bda8f3cf1492fa74926a530f6f70, type: 3} - m_Name: HallwayPlayer - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 36 - numStackedVectorObservations: 3 - vectorActionSize: 05000000 - cameraResolutions: [] - vectorActionDescriptions: - - - vectorActionSpaceType: 0 - keyContinuousPlayerActions: [] - axisContinuousPlayerActions: [] - discretePlayerActions: - - key: 119 - branchIndex: 0 - value: 1 - - key: 115 - branchIndex: 0 - value: 2 - - key: 100 - branchIndex: 0 - value: 3 - - key: 97 - branchIndex: 0 - value: 4 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Brains/HallwayPlayer.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/Hallway/Brains/HallwayPlayer.asset.meta deleted file mode 100644 index f05459e852..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Brains/HallwayPlayer.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 51f870f0190b643adae5432c0e6205e7 -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Brains/VisualHallwayLearning.asset b/UnitySDK/Assets/ML-Agents/Examples/Hallway/Brains/VisualHallwayLearning.asset deleted file mode 100644 index 18eb01bd9b..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Brains/VisualHallwayLearning.asset +++ /dev/null @@ -1,26 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 8b23992c8eb17439887f5e944bf04a40, type: 3} - m_Name: VisualHallwayLearning - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 0 - numStackedVectorObservations: 1 - vectorActionSize: 05000000 - cameraResolutions: - - width: 84 - height: 84 - blackAndWhite: 0 - vectorActionDescriptions: - - - vectorActionSpaceType: 0 - model: {fileID: 0} - inferenceDevice: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Brains/VisualHallwayLearning.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/Hallway/Brains/VisualHallwayLearning.asset.meta deleted file mode 100644 index 935c9f9ef1..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Brains/VisualHallwayLearning.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: fe56dd72ed38a4c2fb5419aba1e2d5f2 -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Brains/VisualHallwayPlayer.asset b/UnitySDK/Assets/ML-Agents/Examples/Hallway/Brains/VisualHallwayPlayer.asset deleted file mode 100644 index fc317636b1..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Brains/VisualHallwayPlayer.asset +++ /dev/null @@ -1,36 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 41e9bda8f3cf1492fa74926a530f6f70, type: 3} - m_Name: VisualHallwayPlayer - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 0 - numStackedVectorObservations: 1 - vectorActionSize: 05000000 - cameraResolutions: [] - vectorActionDescriptions: - - - vectorActionSpaceType: 0 - keyContinuousPlayerActions: [] - axisContinuousPlayerActions: [] - discretePlayerActions: - - key: 97 - branchIndex: 0 - value: 4 - - key: 100 - branchIndex: 0 - value: 3 - - key: 119 - branchIndex: 0 - value: 1 - - key: 115 - branchIndex: 0 - value: 2 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Brains/VisualHallwayPlayer.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/Hallway/Brains/VisualHallwayPlayer.asset.meta deleted file mode 100644 index 9cc2ab18e6..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Brains/VisualHallwayPlayer.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: a36aad05c06144991a0a5e87de40d003 -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Prefabs/StudentSymbolFinderArea.prefab b/UnitySDK/Assets/ML-Agents/Examples/Hallway/Prefabs/StudentSymbolFinderArea.prefab deleted file mode 100644 index acfaa55e0c..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Prefabs/StudentSymbolFinderArea.prefab +++ /dev/null @@ -1,1600 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!1001 &100100000 -Prefab: - m_ObjectHideFlags: 1 - serializedVersion: 2 - m_Modification: - m_TransformParent: {fileID: 0} - m_Modifications: [] - m_RemovedComponents: [] - m_ParentPrefab: {fileID: 0} - m_RootGameObject: {fileID: 1274628913364804} - m_IsPrefabParent: 1 ---- !u!1 &1055717072077452 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4600134730693562} - - component: {fileID: 33576680548916784} - - component: {fileID: 23145651662145446} - m_Layer: 0 - m_Name: eye - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1080386883178956 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4519352940251690} - - component: {fileID: 33803401099074782} - - component: {fileID: 65926043926358412} - - component: {fileID: 23008948052944308} - m_Layer: 0 - m_Name: Symbol_O - m_TagString: symbol_O_Goal - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1099731155068792 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4004076973647508} - - component: {fileID: 33710140165296724} - - component: {fileID: 23383329460805644} - m_Layer: 0 - m_Name: eye - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1101584272416062 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4938541899478308} - - component: {fileID: 33079656341089690} - - component: {fileID: 23707059742108376} - m_Layer: 0 - 
m_Name: symbol_circle - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1273051807105660 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4611322161096344} - - component: {fileID: 33567378096565084} - - component: {fileID: 23407317854853782} - m_Layer: 0 - m_Name: symbol_x - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1274628913364804 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4533847489817540} - m_Layer: 0 - m_Name: StudentSymbolFinderArea - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1295801862460564 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4477032473423966} - - component: {fileID: 33838268464619044} - - component: {fileID: 65151762011244390} - - component: {fileID: 23339194007396686} - m_Layer: 0 - m_Name: Symbol_X - m_TagString: symbol_X_Goal - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1346558540986106 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4677236927751450} - - component: {fileID: 33331540215097958} - - component: {fileID: 23190151837691634} - m_Layer: 0 - m_Name: mouth - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1361522181596410 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - 
m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4582322072978638} - - component: {fileID: 33161871073615424} - - component: {fileID: 23429924986494198} - - component: {fileID: 65186181504438908} - m_Layer: 0 - m_Name: symbol_X_Goal - m_TagString: symbol_X_Goal - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1406588313331992 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4108701234799872} - - component: {fileID: 33813947472522474} - - component: {fileID: 23240579083218126} - - component: {fileID: 65481764384235914} - m_Layer: 0 - m_Name: Ground - m_TagString: ground - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 4294967295 - m_IsActive: 1 ---- !u!1 &1427081450337306 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4429683763635690} - - component: {fileID: 33486223940503602} - - component: {fileID: 23467140940011188} - m_Layer: 0 - m_Name: AgentCube_Blue - m_TagString: agent - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1492686855108494 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4006877064760510} - - component: {fileID: 33286909133843244} - - component: {fileID: 23563250696752858} - - component: {fileID: 65603974142455606} - m_Layer: 0 - m_Name: symbol_O_Goal - m_TagString: symbol_O_Goal - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1554225037989888 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - 
m_Component: - - component: {fileID: 4603214574004846} - - component: {fileID: 33952019346276406} - - component: {fileID: 65918870089765118} - - component: {fileID: 23215497790380468} - m_Layer: 0 - m_Name: Symbol_X - m_TagString: symbol_X - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1600788470282224 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4630815880229136} - - component: {fileID: 33304621390013932} - - component: {fileID: 23654196462637332} - m_Layer: 0 - m_Name: symbol_circle - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1694869030961772 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4904432726956002} - - component: {fileID: 20578156774140218} - m_Layer: 0 - m_Name: PlayerCam - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 0 ---- !u!1 &1739401085860348 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4997699144557586} - - component: {fileID: 20795274633436902} - m_Layer: 0 - m_Name: AgentCamera - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 0 ---- !u!1 &1774241143684238 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4268282596879828} - - component: {fileID: 33026326647395192} - - component: {fileID: 23467801761516014} - - component: {fileID: 64815687293671286} - m_Layer: 0 - m_Name: Walls - m_TagString: wall - m_Icon: {fileID: 
0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 4294967295 - m_IsActive: 1 ---- !u!1 &1778657042051106 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4814654062247284} - - component: {fileID: 33019187693963632} - - component: {fileID: 23031648290278420} - m_Layer: 0 - m_Name: symbol_x - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1812740348993988 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4878745346939332} - - component: {fileID: 33411238453053674} - - component: {fileID: 23739067482414142} - m_Layer: 0 - m_Name: Headband - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1826701478291064 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4292945615372686} - - component: {fileID: 65155947613591668} - - component: {fileID: 54348066631300022} - - component: {fileID: 114706429822940556} - - component: {fileID: 114408745897253222} - m_Layer: 0 - m_Name: Agent - m_TagString: agent - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1945108862147050 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4151118525255086} - - component: {fileID: 33276758276021836} - - component: {fileID: 65598040798074938} - - component: {fileID: 23005698743920596} - m_Layer: 0 - m_Name: Symbol_O - m_TagString: symbol_O - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 
&4004076973647508 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1099731155068792} - m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} - m_LocalPosition: {x: 0.29999995, y: 0.07399994, z: 0.50040054} - m_LocalScale: {x: 0.29457998, y: 0.29457998, z: 0.29457998} - m_Children: [] - m_Father: {fileID: 4429683763635690} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4006877064760510 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1492686855108494} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: -7, y: 0.588, z: 22.29} - m_LocalScale: {x: 4, y: 0.1, z: 4} - m_Children: - - {fileID: 4519352940251690} - m_Father: {fileID: 4533847489817540} - m_RootOrder: 2 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4108701234799872 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1406588313331992} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0.015001526, y: 0.5, z: 0} - m_LocalScale: {x: 0.01, y: 0.01, z: 0.01} - m_Children: [] - m_Father: {fileID: 4533847489817540} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4151118525255086 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1945108862147050} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 2.45, z: 0} - m_LocalScale: {x: 4, y: 4, z: 1} - m_Children: - - {fileID: 4938541899478308} - m_Father: {fileID: 4533847489817540} - m_RootOrder: 5 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4268282596879828 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1774241143684238} - 
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: -0.01499878, y: -0.5, z: 0} - m_LocalScale: {x: 0.01, y: 0.01, z: 0.01} - m_Children: [] - m_Father: {fileID: 4533847489817540} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4292945615372686 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1826701478291064} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 1.54, z: -8.5} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 4429683763635690} - - {fileID: 4904432726956002} - m_Father: {fileID: 4533847489817540} - m_RootOrder: 6 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4429683763635690 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1427081450337306} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 4997699144557586} - - {fileID: 4004076973647508} - - {fileID: 4600134730693562} - - {fileID: 4677236927751450} - - {fileID: 4878745346939332} - m_Father: {fileID: 4292945615372686} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4477032473423966 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1295801862460564} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 20, z: 0.625} - m_LocalScale: {x: 1.0202925, y: 40.1236, z: 0.286225} - m_Children: - - {fileID: 4611322161096344} - m_Father: {fileID: 4582322072978638} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4519352940251690 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1080386883178956} - m_LocalRotation: {x: -0, y: 
-0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 20, z: 0.625} - m_LocalScale: {x: 1.0202925, y: 40.1236, z: 0.286225} - m_Children: - - {fileID: 4630815880229136} - m_Father: {fileID: 4006877064760510} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4533847489817540 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1274628913364804} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 27, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 4268282596879828} - - {fileID: 4108701234799872} - - {fileID: 4006877064760510} - - {fileID: 4582322072978638} - - {fileID: 4603214574004846} - - {fileID: 4151118525255086} - - {fileID: 4292945615372686} - m_Father: {fileID: 0} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4582322072978638 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1361522181596410} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 7, y: 0.5, z: 22.29} - m_LocalScale: {x: 4, y: 0.1, z: 4} - m_Children: - - {fileID: 4477032473423966} - m_Father: {fileID: 4533847489817540} - m_RootOrder: 3 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4600134730693562 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1055717072077452} - m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} - m_LocalPosition: {x: -0.29999995, y: 0.07399994, z: 0.50040054} - m_LocalScale: {x: 0.29457998, y: 0.29457998, z: 0.29457998} - m_Children: [] - m_Father: {fileID: 4429683763635690} - m_RootOrder: 2 - m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4603214574004846 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1554225037989888} - 
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 2.45, z: 0} - m_LocalScale: {x: 4, y: 4, z: 1} - m_Children: - - {fileID: 4814654062247284} - m_Father: {fileID: 4533847489817540} - m_RootOrder: 4 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4611322161096344 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1273051807105660} - m_LocalRotation: {x: -0, y: -0, z: 0.38268343, w: 0.92387956} - m_LocalPosition: {x: 0, y: 0, z: -0.337} - m_LocalScale: {x: 0.39643255, y: 0.39643255, z: 1.4011297} - m_Children: [] - m_Father: {fileID: 4477032473423966} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 45} ---- !u!4 &4630815880229136 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1600788470282224} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: -0.33826} - m_LocalScale: {x: 0.37466624, y: 0.38109082, z: 1.3355573} - m_Children: [] - m_Father: {fileID: 4519352940251690} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4677236927751450 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1346558540986106} - m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} - m_LocalPosition: {x: 0, y: -0.18299997, z: 0.50040054} - m_LocalScale: {x: 0.27602, y: 0.042489994, z: 0.13891} - m_Children: [] - m_Father: {fileID: 4429683763635690} - m_RootOrder: 3 - m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4814654062247284 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1778657042051106} - m_LocalRotation: {x: -0, y: -0, z: 0.38268343, w: 0.92387956} - m_LocalPosition: {x: 0, y: 0, z: -0.337} - m_LocalScale: {x: 0.39643255, y: 0.39643255, z: 
1.4011297} - m_Children: [] - m_Father: {fileID: 4603214574004846} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 45} ---- !u!4 &4878745346939332 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1812740348993988} - m_LocalRotation: {x: -0, y: -0, z: 0.016506119, w: 0.9998638} - m_LocalPosition: {x: 0, y: 0.341, z: 0} - m_LocalScale: {x: 1.0441425, y: 0.19278127, z: 1.0441422} - m_Children: [] - m_Father: {fileID: 4429683763635690} - m_RootOrder: 4 - m_LocalEulerAnglesHint: {x: 0, y: -179.99998, z: 1.8920001} ---- !u!4 &4904432726956002 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1694869030961772} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 107, z: 4} - m_LocalScale: {x: 100, y: 100, z: 100} - m_Children: [] - m_Father: {fileID: 4292945615372686} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4938541899478308 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1101584272416062} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: -0.33826} - m_LocalScale: {x: 0.37466624, y: 0.38109082, z: 1.3355573} - m_Children: [] - m_Father: {fileID: 4151118525255086} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4997699144557586 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1739401085860348} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0.15} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 4429683763635690} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!20 &20578156774140218 -Camera: - m_ObjectHideFlags: 1 - 
m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1694869030961772} - m_Enabled: 1 - serializedVersion: 2 - m_ClearFlags: 2 - m_BackGroundColor: {r: 0.5043253, g: 0.5998091, b: 0.64705884, a: 0} - m_NormalizedViewPortRect: - serializedVersion: 2 - x: 0 - y: 0 - width: 1 - height: 1 - near clip plane: 0.1 - far clip plane: 5000 - field of view: 60 - orthographic: 0 - orthographic size: 10 - m_Depth: 2 - m_CullingMask: - serializedVersion: 2 - m_Bits: 4294967295 - m_RenderingPath: -1 - m_TargetTexture: {fileID: 0} - m_TargetDisplay: 0 - m_TargetEye: 3 - m_HDR: 1 - m_AllowMSAA: 1 - m_AllowDynamicResolution: 0 - m_ForceIntoRT: 0 - m_OcclusionCulling: 1 - m_StereoConvergence: 10 - m_StereoSeparation: 0.022 ---- !u!20 &20795274633436902 -Camera: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1739401085860348} - m_Enabled: 1 - serializedVersion: 2 - m_ClearFlags: 2 - m_BackGroundColor: {r: 0.46666667, g: 0.5647059, b: 0.60784316, a: 1} - m_NormalizedViewPortRect: - serializedVersion: 2 - x: 0 - y: 0 - width: 1 - height: 1 - near clip plane: 0.3 - far clip plane: 1000 - field of view: 60 - orthographic: 0 - orthographic size: 5 - m_Depth: 0 - m_CullingMask: - serializedVersion: 2 - m_Bits: 4294950911 - m_RenderingPath: -1 - m_TargetTexture: {fileID: 0} - m_TargetDisplay: 0 - m_TargetEye: 3 - m_HDR: 1 - m_AllowMSAA: 1 - m_AllowDynamicResolution: 0 - m_ForceIntoRT: 0 - m_OcclusionCulling: 1 - m_StereoConvergence: 10 - m_StereoSeparation: 0.022 ---- !u!23 &23005698743920596 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1945108862147050} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: eaad04b0e0dec42229c9cb00a981d7ac, 
type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23008948052944308 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1080386883178956} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 10303, guid: 0000000000000000f000000000000000, type: 0} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23031648290278420 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1778657042051106} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 69fefdd39d2b34b169e921910bed9c0d, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} 
- m_ScaleInLightmap: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23145651662145446 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1055717072077452} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: f731be6866ce749fd8349e67ae81f76a, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23190151837691634 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1346558540986106} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: f731be6866ce749fd8349e67ae81f76a, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - 
m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23215497790380468 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1554225037989888} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: eaad04b0e0dec42229c9cb00a981d7ac, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23240579083218126 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1406588313331992} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: d3855c73a8adc453f89d8df66068f21f, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 
0 ---- !u!23 &23339194007396686 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1295801862460564} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: eaad04b0e0dec42229c9cb00a981d7ac, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23383329460805644 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1099731155068792} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: f731be6866ce749fd8349e67ae81f76a, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23407317854853782 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: 
{fileID: 1273051807105660} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 69fefdd39d2b34b169e921910bed9c0d, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23429924986494198 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1361522181596410} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: c67450f290f3e4897bc40276a619e78d, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23467140940011188 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1427081450337306} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - 
m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: c9fa44c2c3f8ce74ca39a3355ea42631, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23467801761516014 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1774241143684238} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 66163cf35956a4be08e801b750c26f33, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23563250696752858 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1492686855108494} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: c67450f290f3e4897bc40276a619e78d, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 
0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23654196462637332 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1600788470282224} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 69fefdd39d2b34b169e921910bed9c0d, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23707059742108376 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1101584272416062} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 69fefdd39d2b34b169e921910bed9c0d, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 0 - 
m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23739067482414142 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1812740348993988} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 04be259c590de46f69db4cbd1da877d5, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!33 &33019187693963632 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1778657042051106} - m_Mesh: {fileID: 4300000, guid: df826ba32791e458caefe83b4498ce35, type: 3} ---- !u!33 &33026326647395192 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1774241143684238} - m_Mesh: {fileID: 4300000, guid: 9bf69916dc93a284e857ff6ffa070331, type: 3} ---- !u!33 &33079656341089690 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1101584272416062} - m_Mesh: {fileID: 4300000, guid: 809601725d53c41fb9c7a75071bfbf51, type: 3} ---- !u!33 
&33161871073615424 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1361522181596410} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33276758276021836 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1945108862147050} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33286909133843244 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1492686855108494} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33304621390013932 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1600788470282224} - m_Mesh: {fileID: 4300000, guid: 809601725d53c41fb9c7a75071bfbf51, type: 3} ---- !u!33 &33331540215097958 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1346558540986106} - m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33411238453053674 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1812740348993988} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33486223940503602 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1427081450337306} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33567378096565084 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1273051807105660} - 
m_Mesh: {fileID: 4300000, guid: df826ba32791e458caefe83b4498ce35, type: 3} ---- !u!33 &33576680548916784 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1055717072077452} - m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33710140165296724 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1099731155068792} - m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33803401099074782 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1080386883178956} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33813947472522474 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1406588313331992} - m_Mesh: {fileID: 4300000, guid: e36ed69b3747404418ab8087954fe038, type: 3} ---- !u!33 &33838268464619044 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1295801862460564} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33952019346276406 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1554225037989888} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!54 &54348066631300022 -Rigidbody: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1826701478291064} - serializedVersion: 2 - m_Mass: 25 - m_Drag: 2 - m_AngularDrag: 0.05 - m_UseGravity: 1 - m_IsKinematic: 0 - m_Interpolate: 0 - m_Constraints: 80 - m_CollisionDetection: 2 ---- 
!u!64 &64815687293671286 -MeshCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1774241143684238} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 3 - m_Convex: 0 - m_CookingOptions: 14 - m_SkinWidth: 0.01 - m_Mesh: {fileID: 4300000, guid: 9bf69916dc93a284e857ff6ffa070331, type: 3} ---- !u!65 &65151762011244390 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1295801862460564} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65155947613591668 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1826701478291064} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65186181504438908 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1361522181596410} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65481764384235914 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1406588313331992} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 2000, y: 100, z: 5000} - m_Center: {x: -1.5001221, y: -50, z: 0} ---- !u!65 &65598040798074938 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1945108862147050} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - 
m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65603974142455606 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1492686855108494} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65918870089765118 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1554225037989888} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65926043926358412 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1080386883178956} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!114 &114408745897253222 -MonoBehaviour: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1826701478291064} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: bb172294dbbcc408286b156a2c4b553c, type: 3} - m_Name: - m_EditorClassIdentifier: ---- !u!114 &114706429822940556 -MonoBehaviour: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1826701478291064} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: b446afae240924105b36d07e8d17a608, type: 3} - m_Name: - m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: 533f2edd327794ca996d0320901b501c, type: 2} - agentParameters: - agentCameras: [] - agentRenderTextures: [] - maxStep: 3000 - resetOnDone: 1 - onDemandDecision: 0 - numberOfActionsBetweenDecisions: 6 - ground: {fileID: 1406588313331992} 
- area: {fileID: 1274628913364804} - symbolOGoal: {fileID: 1492686855108494} - symbolXGoal: {fileID: 1361522181596410} - symbolO: {fileID: 1945108862147050} - symbolX: {fileID: 1554225037989888} - useVectorObs: 1 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Prefabs/StudentSymbolFinderArea.prefab.meta b/UnitySDK/Assets/ML-Agents/Examples/Hallway/Prefabs/StudentSymbolFinderArea.prefab.meta deleted file mode 100644 index 3ef121e07a..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Prefabs/StudentSymbolFinderArea.prefab.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 344c35f795e0c4641991cc5b96547e6d -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 100100000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Prefabs/SymbolFinderArea.prefab b/UnitySDK/Assets/ML-Agents/Examples/Hallway/Prefabs/SymbolFinderArea.prefab index 0c1d8d3f7e..8a3a024dba 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Prefabs/SymbolFinderArea.prefab +++ b/UnitySDK/Assets/ML-Agents/Examples/Hallway/Prefabs/SymbolFinderArea.prefab @@ -246,6 +246,7 @@ GameObject: - component: {fileID: 4933884233896554} - component: {fileID: 65639693558106190} - component: {fileID: 54112968250075710} + - component: {fileID: 114907778469006590} - component: {fileID: 114286701363010626} - component: {fileID: 114569343444552314} m_Layer: 0 @@ -1572,10 +1573,7 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: b446afae240924105b36d07e8d17a608, type: 3} m_Name: m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: 533f2edd327794ca996d0320901b501c, type: 2} agentParameters: - agentCameras: [] - agentRenderTextures: [] maxStep: 3000 resetOnDone: 1 onDemandDecision: 0 @@ -1598,3 +1596,24 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: bb172294dbbcc408286b156a2c4b553c, type: 3} m_Name: m_EditorClassIdentifier: +--- !u!114 &114907778469006590 +MonoBehaviour: + m_ObjectHideFlags: 1 + 
m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1471560210313468} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} + m_Name: + m_EditorClassIdentifier: + m_BrainParameters: + vectorObservationSize: 36 + numStackedVectorObservations: 3 + vectorActionSize: 05000000 + vectorActionDescriptions: [] + vectorActionSpaceType: 0 + m_Model: {fileID: 11400000, guid: 317f4f8da7e4846b3aae0969781824a2, type: 3} + m_InferenceDevice: 0 + m_UseHeuristic: 0 + m_BehaviorName: Hallway diff --git a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Prefabs/TeacherSymbolFinderArea.prefab b/UnitySDK/Assets/ML-Agents/Examples/Hallway/Prefabs/TeacherSymbolFinderArea.prefab deleted file mode 100644 index f537bfa064..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Prefabs/TeacherSymbolFinderArea.prefab +++ /dev/null @@ -1,1600 +0,0 @@ -%YAML 1.1 -%TAG !u! tag:unity3d.com,2011: ---- !u!1001 &100100000 -Prefab: - m_ObjectHideFlags: 1 - serializedVersion: 2 - m_Modification: - m_TransformParent: {fileID: 0} - m_Modifications: [] - m_RemovedComponents: [] - m_ParentPrefab: {fileID: 0} - m_RootGameObject: {fileID: 1734350477348694} - m_IsPrefabParent: 1 ---- !u!1 &1065283608971268 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4997899861289026} - - component: {fileID: 33086421009090170} - - component: {fileID: 23037714890468710} - m_Layer: 0 - m_Name: mouth - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1077929938690272 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4744178914414132} - - component: {fileID: 33316394158339706} - - component: {fileID: 
23766549631743912} - - component: {fileID: 65502930704067228} - m_Layer: 0 - m_Name: symbol_X_Goal - m_TagString: symbol_X_Goal - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1163818123550610 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4847719067342670} - - component: {fileID: 33256516820199636} - - component: {fileID: 23135768181633000} - m_Layer: 0 - m_Name: symbol_circle - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1174787094534024 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4479850696862608} - - component: {fileID: 33996964478850816} - - component: {fileID: 23205293012073254} - m_Layer: 0 - m_Name: eye - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1202355998263646 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4388255954958256} - - component: {fileID: 33982971723741372} - - component: {fileID: 23206137672525222} - m_Layer: 0 - m_Name: eye - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1260402489684410 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4225085900736984} - - component: {fileID: 33546418793821296} - - component: {fileID: 23514181277034074} - - component: {fileID: 64625602348965916} - m_Layer: 0 - m_Name: Walls - m_TagString: wall - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - 
m_StaticEditorFlags: 4294967295 - m_IsActive: 1 ---- !u!1 &1266065233787350 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4327760912049326} - - component: {fileID: 33828284022210772} - - component: {fileID: 23100965949668260} - m_Layer: 0 - m_Name: symbol_x - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1313448816330826 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4126000219834156} - - component: {fileID: 33643559530666352} - - component: {fileID: 23317764117839544} - m_Layer: 0 - m_Name: Headband - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1320526924683862 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4187195150774660} - - component: {fileID: 33243712784099842} - - component: {fileID: 65066203413401102} - - component: {fileID: 23487091801190124} - m_Layer: 0 - m_Name: Symbol_O - m_TagString: symbol_O - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1370603211015682 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4033662013957368} - - component: {fileID: 33476379338443586} - - component: {fileID: 65047679382380448} - - component: {fileID: 23216873304066102} - m_Layer: 0 - m_Name: Symbol_X - m_TagString: symbol_X - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1448428737387446 -GameObject: - m_ObjectHideFlags: 0 - 
m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4597992300155750} - - component: {fileID: 65619555895512540} - - component: {fileID: 54008369775390722} - - component: {fileID: 114351483917802064} - - component: {fileID: 114530730548664348} - m_Layer: 0 - m_Name: Agent - m_TagString: agent - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1495070535419640 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4357617254700176} - - component: {fileID: 33415730187217894} - - component: {fileID: 23115474905733760} - - component: {fileID: 65453478739237514} - m_Layer: 0 - m_Name: Ground - m_TagString: ground - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 4294967295 - m_IsActive: 1 ---- !u!1 &1502887428953764 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4496962597703422} - - component: {fileID: 20594504633560562} - m_Layer: 0 - m_Name: AgentCamera - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 0 ---- !u!1 &1629945218469052 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4955322665183280} - - component: {fileID: 33494227520390422} - - component: {fileID: 23271215106937320} - m_Layer: 0 - m_Name: symbol_circle - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1637967317274960 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - 
component: {fileID: 4869363629049768} - - component: {fileID: 33468197734618918} - - component: {fileID: 65179230020656150} - - component: {fileID: 23993433136005922} - m_Layer: 0 - m_Name: Symbol_O - m_TagString: symbol_O_Goal - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1686476281424084 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4441134634317432} - - component: {fileID: 33143746929301160} - - component: {fileID: 65586508290816660} - - component: {fileID: 23318338541939958} - m_Layer: 0 - m_Name: Symbol_X - m_TagString: symbol_X_Goal - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1703917813165754 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4558216715397324} - - component: {fileID: 33611674429781984} - - component: {fileID: 23568730133498752} - - component: {fileID: 65793315742155134} - m_Layer: 0 - m_Name: symbol_O_Goal - m_TagString: symbol_O_Goal - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1708721838681596 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4163120443520536} - - component: {fileID: 20431662029305678} - m_Layer: 0 - m_Name: PlayerCam - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 0 ---- !u!1 &1734350477348694 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4935900384141556} - m_Layer: 0 - m_Name: TeacherSymbolFinderArea - m_TagString: Untagged - 
m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1925469033031814 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4436323935396220} - - component: {fileID: 33262043711115170} - - component: {fileID: 23523448984590452} - m_Layer: 0 - m_Name: symbol_x - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1996896798508238 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4366959230000766} - - component: {fileID: 33189171670298254} - - component: {fileID: 23453242353443288} - m_Layer: 0 - m_Name: AgentCube_Blue - m_TagString: agent - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &4033662013957368 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1370603211015682} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 2.45, z: 0} - m_LocalScale: {x: 4, y: 4, z: 1} - m_Children: - - {fileID: 4436323935396220} - m_Father: {fileID: 4935900384141556} - m_RootOrder: 5 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4126000219834156 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1313448816330826} - m_LocalRotation: {x: -0, y: -0, z: 0.016506119, w: 0.9998638} - m_LocalPosition: {x: 0, y: 0.341, z: 0} - m_LocalScale: {x: 1.0441425, y: 0.19278127, z: 1.0441422} - m_Children: [] - m_Father: {fileID: 4366959230000766} - m_RootOrder: 4 - m_LocalEulerAnglesHint: {x: 0, y: -179.99998, z: 1.8920001} ---- !u!4 &4163120443520536 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: 
{fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1708721838681596} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 107, z: 4} - m_LocalScale: {x: 100, y: 100, z: 100} - m_Children: [] - m_Father: {fileID: 4597992300155750} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4187195150774660 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1320526924683862} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 2.45, z: 0} - m_LocalScale: {x: 4, y: 4, z: 1} - m_Children: - - {fileID: 4955322665183280} - m_Father: {fileID: 4935900384141556} - m_RootOrder: 6 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4225085900736984 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1260402489684410} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: -0.01499878, y: -0.5, z: 0} - m_LocalScale: {x: 0.01, y: 0.01, z: 0.01} - m_Children: [] - m_Father: {fileID: 4935900384141556} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4327760912049326 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1266065233787350} - m_LocalRotation: {x: -0, y: -0, z: 0.38268343, w: 0.92387956} - m_LocalPosition: {x: 0, y: 0, z: -0.337} - m_LocalScale: {x: 0.39643255, y: 0.39643255, z: 1.4011297} - m_Children: [] - m_Father: {fileID: 4441134634317432} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 45} ---- !u!4 &4357617254700176 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1495070535419640} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0.015001526, y: 0.5, z: 0} - m_LocalScale: 
{x: 0.01, y: 0.01, z: 0.01} - m_Children: [] - m_Father: {fileID: 4935900384141556} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4366959230000766 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1996896798508238} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 4496962597703422} - - {fileID: 4479850696862608} - - {fileID: 4388255954958256} - - {fileID: 4997899861289026} - - {fileID: 4126000219834156} - m_Father: {fileID: 4597992300155750} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4388255954958256 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1202355998263646} - m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} - m_LocalPosition: {x: -0.29999995, y: 0.07399994, z: 0.50040054} - m_LocalScale: {x: 0.29457998, y: 0.29457998, z: 0.29457998} - m_Children: [] - m_Father: {fileID: 4366959230000766} - m_RootOrder: 2 - m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4436323935396220 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1925469033031814} - m_LocalRotation: {x: -0, y: -0, z: 0.38268343, w: 0.92387956} - m_LocalPosition: {x: 0, y: 0, z: -0.337} - m_LocalScale: {x: 0.39643255, y: 0.39643255, z: 1.4011297} - m_Children: [] - m_Father: {fileID: 4033662013957368} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 45} ---- !u!4 &4441134634317432 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1686476281424084} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 20, z: 0.625} - m_LocalScale: {x: 1.0202925, y: 40.1236, z: 0.286225} - 
m_Children: - - {fileID: 4327760912049326} - m_Father: {fileID: 4744178914414132} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4479850696862608 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1174787094534024} - m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} - m_LocalPosition: {x: 0.29999995, y: 0.07399994, z: 0.50040054} - m_LocalScale: {x: 0.29457998, y: 0.29457998, z: 0.29457998} - m_Children: [] - m_Father: {fileID: 4366959230000766} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4496962597703422 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1502887428953764} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0.15} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 4366959230000766} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4558216715397324 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1703917813165754} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: -7, y: 0.588, z: 22.29} - m_LocalScale: {x: 4, y: 0.1, z: 4} - m_Children: - - {fileID: 4869363629049768} - m_Father: {fileID: 4935900384141556} - m_RootOrder: 3 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4597992300155750 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1448428737387446} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 1.54, z: -8.5} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 4366959230000766} - - {fileID: 4163120443520536} - m_Father: {fileID: 4935900384141556} - m_RootOrder: 2 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 
&4744178914414132 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1077929938690272} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 7, y: 0.5, z: 22.29} - m_LocalScale: {x: 4, y: 0.1, z: 4} - m_Children: - - {fileID: 4441134634317432} - m_Father: {fileID: 4935900384141556} - m_RootOrder: 4 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4847719067342670 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1163818123550610} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: -0.33826} - m_LocalScale: {x: 0.37466624, y: 0.38109082, z: 1.3355573} - m_Children: [] - m_Father: {fileID: 4869363629049768} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4869363629049768 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1637967317274960} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 20, z: 0.625} - m_LocalScale: {x: 1.0202925, y: 40.1236, z: 0.286225} - m_Children: - - {fileID: 4847719067342670} - m_Father: {fileID: 4558216715397324} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4935900384141556 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1734350477348694} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 4225085900736984} - - {fileID: 4357617254700176} - - {fileID: 4597992300155750} - - {fileID: 4558216715397324} - - {fileID: 4744178914414132} - - {fileID: 4033662013957368} - - {fileID: 4187195150774660} - m_Father: {fileID: 0} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 
&4955322665183280 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1629945218469052} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: -0.33826} - m_LocalScale: {x: 0.37466624, y: 0.38109082, z: 1.3355573} - m_Children: [] - m_Father: {fileID: 4187195150774660} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4997899861289026 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1065283608971268} - m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} - m_LocalPosition: {x: 0, y: -0.18299997, z: 0.50040054} - m_LocalScale: {x: 0.27602, y: 0.042489994, z: 0.13891} - m_Children: [] - m_Father: {fileID: 4366959230000766} - m_RootOrder: 3 - m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!20 &20431662029305678 -Camera: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1708721838681596} - m_Enabled: 1 - serializedVersion: 2 - m_ClearFlags: 2 - m_BackGroundColor: {r: 0.5043253, g: 0.5998091, b: 0.64705884, a: 0} - m_NormalizedViewPortRect: - serializedVersion: 2 - x: 0 - y: 0 - width: 1 - height: 1 - near clip plane: 0.1 - far clip plane: 5000 - field of view: 60 - orthographic: 0 - orthographic size: 10 - m_Depth: 2 - m_CullingMask: - serializedVersion: 2 - m_Bits: 4294967295 - m_RenderingPath: -1 - m_TargetTexture: {fileID: 0} - m_TargetDisplay: 0 - m_TargetEye: 3 - m_HDR: 1 - m_AllowMSAA: 1 - m_AllowDynamicResolution: 0 - m_ForceIntoRT: 0 - m_OcclusionCulling: 1 - m_StereoConvergence: 10 - m_StereoSeparation: 0.022 ---- !u!20 &20594504633560562 -Camera: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1502887428953764} - m_Enabled: 1 - serializedVersion: 2 - m_ClearFlags: 2 - m_BackGroundColor: {r: 
0.46666667, g: 0.5647059, b: 0.60784316, a: 1} - m_NormalizedViewPortRect: - serializedVersion: 2 - x: 0 - y: 0 - width: 1 - height: 1 - near clip plane: 0.3 - far clip plane: 1000 - field of view: 60 - orthographic: 0 - orthographic size: 5 - m_Depth: 0 - m_CullingMask: - serializedVersion: 2 - m_Bits: 4294950911 - m_RenderingPath: -1 - m_TargetTexture: {fileID: 0} - m_TargetDisplay: 0 - m_TargetEye: 3 - m_HDR: 1 - m_AllowMSAA: 1 - m_AllowDynamicResolution: 0 - m_ForceIntoRT: 0 - m_OcclusionCulling: 1 - m_StereoConvergence: 10 - m_StereoSeparation: 0.022 ---- !u!23 &23037714890468710 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1065283608971268} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: f731be6866ce749fd8349e67ae81f76a, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23100965949668260 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1266065233787350} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 69fefdd39d2b34b169e921910bed9c0d, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - 
m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23115474905733760 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1495070535419640} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: d3855c73a8adc453f89d8df66068f21f, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23135768181633000 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1163818123550610} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 69fefdd39d2b34b169e921910bed9c0d, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 
- m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23205293012073254 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1174787094534024} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: f731be6866ce749fd8349e67ae81f76a, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23206137672525222 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1202355998263646} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: f731be6866ce749fd8349e67ae81f76a, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 
0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23216873304066102 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1370603211015682} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: eaad04b0e0dec42229c9cb00a981d7ac, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23271215106937320 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1629945218469052} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 69fefdd39d2b34b169e921910bed9c0d, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23317764117839544 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 
0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1313448816330826} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 04be259c590de46f69db4cbd1da877d5, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23318338541939958 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1686476281424084} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: eaad04b0e0dec42229c9cb00a981d7ac, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23453242353443288 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1996896798508238} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - 
m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: c9fa44c2c3f8ce74ca39a3355ea42631, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23487091801190124 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1320526924683862} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: eaad04b0e0dec42229c9cb00a981d7ac, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23514181277034074 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1260402489684410} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 66163cf35956a4be08e801b750c26f33, type: 2} - 
m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23523448984590452 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1925469033031814} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 69fefdd39d2b34b169e921910bed9c0d, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23568730133498752 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1703917813165754} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: c67450f290f3e4897bc40276a619e78d, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - 
m_ScaleInLightmap: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23766549631743912 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1077929938690272} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: c67450f290f3e4897bc40276a619e78d, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23993433136005922 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1637967317274960} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 10303, guid: 0000000000000000f000000000000000, type: 0} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - 
m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!33 &33086421009090170 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1065283608971268} - m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33143746929301160 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1686476281424084} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33189171670298254 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1996896798508238} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33243712784099842 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1320526924683862} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33256516820199636 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1163818123550610} - m_Mesh: {fileID: 4300000, guid: 809601725d53c41fb9c7a75071bfbf51, type: 3} ---- !u!33 &33262043711115170 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1925469033031814} - m_Mesh: {fileID: 4300000, guid: df826ba32791e458caefe83b4498ce35, type: 3} ---- !u!33 &33316394158339706 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1077929938690272} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- 
!u!33 &33415730187217894 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1495070535419640} - m_Mesh: {fileID: 4300000, guid: e36ed69b3747404418ab8087954fe038, type: 3} ---- !u!33 &33468197734618918 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1637967317274960} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33476379338443586 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1370603211015682} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33494227520390422 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1629945218469052} - m_Mesh: {fileID: 4300000, guid: 809601725d53c41fb9c7a75071bfbf51, type: 3} ---- !u!33 &33546418793821296 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1260402489684410} - m_Mesh: {fileID: 4300000, guid: 9bf69916dc93a284e857ff6ffa070331, type: 3} ---- !u!33 &33611674429781984 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1703917813165754} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33643559530666352 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1313448816330826} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33828284022210772 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 
1266065233787350} - m_Mesh: {fileID: 4300000, guid: df826ba32791e458caefe83b4498ce35, type: 3} ---- !u!33 &33982971723741372 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1202355998263646} - m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33996964478850816 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1174787094534024} - m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} ---- !u!54 &54008369775390722 -Rigidbody: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1448428737387446} - serializedVersion: 2 - m_Mass: 25 - m_Drag: 2 - m_AngularDrag: 0.05 - m_UseGravity: 1 - m_IsKinematic: 0 - m_Interpolate: 0 - m_Constraints: 80 - m_CollisionDetection: 2 ---- !u!64 &64625602348965916 -MeshCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1260402489684410} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 3 - m_Convex: 0 - m_CookingOptions: 14 - m_SkinWidth: 0.01 - m_Mesh: {fileID: 4300000, guid: 9bf69916dc93a284e857ff6ffa070331, type: 3} ---- !u!65 &65047679382380448 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1370603211015682} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65066203413401102 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1320526924683862} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - 
m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65179230020656150 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1637967317274960} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65453478739237514 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1495070535419640} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 2000, y: 100, z: 5000} - m_Center: {x: -1.5001221, y: -50, z: 0} ---- !u!65 &65502930704067228 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1077929938690272} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65586508290816660 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1686476281424084} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65619555895512540 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1448428737387446} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65793315742155134 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1703917813165754} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, 
y: 0, z: 0} ---- !u!114 &114351483917802064 -MonoBehaviour: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1448428737387446} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: b446afae240924105b36d07e8d17a608, type: 3} - m_Name: - m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: 51f870f0190b643adae5432c0e6205e7, type: 2} - agentParameters: - agentCameras: [] - agentRenderTextures: [] - maxStep: 3000 - resetOnDone: 1 - onDemandDecision: 0 - numberOfActionsBetweenDecisions: 6 - ground: {fileID: 1495070535419640} - area: {fileID: 1734350477348694} - symbolOGoal: {fileID: 1703917813165754} - symbolXGoal: {fileID: 1077929938690272} - symbolO: {fileID: 1320526924683862} - symbolX: {fileID: 1370603211015682} - useVectorObs: 1 ---- !u!114 &114530730548664348 -MonoBehaviour: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1448428737387446} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: bb172294dbbcc408286b156a2c4b553c, type: 3} - m_Name: - m_EditorClassIdentifier: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Prefabs/TeacherSymbolFinderArea.prefab.meta b/UnitySDK/Assets/ML-Agents/Examples/Hallway/Prefabs/TeacherSymbolFinderArea.prefab.meta deleted file mode 100644 index a2d4477763..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Prefabs/TeacherSymbolFinderArea.prefab.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: ce3434d96b87c40c6a765e6d78da40cd -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 100100000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Prefabs/VisualSymbolFinderArea.prefab b/UnitySDK/Assets/ML-Agents/Examples/Hallway/Prefabs/VisualSymbolFinderArea.prefab index dca7b0d7eb..37e81d0627 100644 --- 
a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Prefabs/VisualSymbolFinderArea.prefab +++ b/UnitySDK/Assets/ML-Agents/Examples/Hallway/Prefabs/VisualSymbolFinderArea.prefab @@ -158,7 +158,9 @@ GameObject: - component: {fileID: 4291041439716878} - component: {fileID: 65678389736547598} - component: {fileID: 54606255118850520} + - component: {fileID: 114090834606594908} - component: {fileID: 114451776683649118} + - component: {fileID: 114065716362190190} m_Layer: 0 m_Name: Agent m_TagString: agent @@ -1848,6 +1850,43 @@ BoxCollider: serializedVersion: 2 m_Size: {x: 1, y: 1, z: 1} m_Center: {x: 0, y: 0, z: 0} +--- !u!114 &114065716362190190 +MonoBehaviour: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1234267001558658} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 282f342c2ab144bf38be65d4d0c4e07d, type: 3} + m_Name: + m_EditorClassIdentifier: + camera: {fileID: 20961984019151212} + sensorName: CameraSensor + width: 84 + height: 84 + grayscale: 0 +--- !u!114 &114090834606594908 +MonoBehaviour: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1234267001558658} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} + m_Name: + m_EditorClassIdentifier: + m_BrainParameters: + vectorObservationSize: 0 + numStackedVectorObservations: 1 + vectorActionSize: 05000000 + vectorActionDescriptions: [] + vectorActionSpaceType: 0 + m_Model: {fileID: 0} + m_InferenceDevice: 0 + m_UseHeuristic: 0 + m_BehaviorName: VisualHallway --- !u!114 &114451776683649118 MonoBehaviour: m_ObjectHideFlags: 1 @@ -1859,11 +1898,7 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: b446afae240924105b36d07e8d17a608, type: 3} m_Name: m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: a36aad05c06144991a0a5e87de40d003, type: 2} agentParameters: - agentCameras: 
- - {fileID: 20961984019151212} - agentRenderTextures: [] maxStep: 3000 resetOnDone: 1 onDemandDecision: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Scenes/Hallway.unity b/UnitySDK/Assets/ML-Agents/Examples/Hallway/Scenes/Hallway.unity index 3caf200215..65bd1c9fa9 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Scenes/Hallway.unity +++ b/UnitySDK/Assets/ML-Agents/Examples/Hallway/Scenes/Hallway.unity @@ -1140,10 +1140,8 @@ MonoBehaviour: m_Name: m_EditorClassIdentifier: broadcastHub: - broadcastingBrains: + brainsToControl: - {fileID: 11400000, guid: 533f2edd327794ca996d0320901b501c, type: 2} - m_BrainsToControl: [] - m_MaxSteps: 0 m_TrainingConfiguration: width: 128 height: 128 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Scenes/HallwayIL.unity b/UnitySDK/Assets/ML-Agents/Examples/Hallway/Scenes/HallwayIL.unity deleted file mode 100644 index af5b451ff8..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Scenes/HallwayIL.unity +++ /dev/null @@ -1,653 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!29 &1 -OcclusionCullingSettings: - m_ObjectHideFlags: 0 - serializedVersion: 2 - m_OcclusionBakeSettings: - smallestOccluder: 5 - smallestHole: 0.25 - backfaceThreshold: 100 - m_SceneGUID: 00000000000000000000000000000000 - m_OcclusionCullingData: {fileID: 0} ---- !u!104 &2 -RenderSettings: - m_ObjectHideFlags: 0 - serializedVersion: 8 - m_Fog: 0 - m_FogColor: {r: 0.5, g: 0.5, b: 0.5, a: 1} - m_FogMode: 3 - m_FogDensity: 0.01 - m_LinearFogStart: 0 - m_LinearFogEnd: 300 - m_AmbientSkyColor: {r: 0.8, g: 0.8, b: 0.8, a: 1} - m_AmbientEquatorColor: {r: 0.114, g: 0.125, b: 0.133, a: 1} - m_AmbientGroundColor: {r: 0.047, g: 0.043, b: 0.035, a: 1} - m_AmbientIntensity: 1 - m_AmbientMode: 3 - m_SubtractiveShadowColor: {r: 0.42, g: 0.478, b: 0.627, a: 1} - m_SkyboxMaterial: {fileID: 0} - m_HaloStrength: 0.5 - m_FlareStrength: 1 - m_FlareFadeSpeed: 3 - m_HaloTexture: {fileID: 0} - m_SpotCookie: {fileID: 10001, guid: 0000000000000000e000000000000000, type: 0} - m_DefaultReflectionMode: 0 - m_DefaultReflectionResolution: 128 - m_ReflectionBounces: 1 - m_ReflectionIntensity: 1 - m_CustomReflection: {fileID: 0} - m_Sun: {fileID: 0} - m_IndirectSpecularColor: {r: 0, g: 0, b: 0, a: 1} ---- !u!157 &3 -LightmapSettings: - m_ObjectHideFlags: 0 - serializedVersion: 11 - m_GIWorkflowMode: 0 - m_GISettings: - serializedVersion: 2 - m_BounceScale: 1 - m_IndirectOutputScale: 1 - m_AlbedoBoost: 1 - m_TemporalCoherenceThreshold: 1 - m_EnvironmentLightingMode: 0 - m_EnableBakedLightmaps: 1 - m_EnableRealtimeLightmaps: 1 - m_LightmapEditorSettings: - serializedVersion: 9 - m_Resolution: 2 - m_BakeResolution: 40 - m_TextureWidth: 1024 - m_TextureHeight: 1024 - m_AO: 0 - m_AOMaxDistance: 1 - m_CompAOExponent: 1 - m_CompAOExponentDirect: 0 - m_Padding: 2 - m_LightmapParameters: {fileID: 0} - m_LightmapsBakeMode: 1 - m_TextureCompression: 1 - m_FinalGather: 0 - m_FinalGatherFiltering: 1 - m_FinalGatherRayCount: 256 - m_ReflectionCompression: 2 - m_MixedBakeMode: 2 - 
m_BakeBackend: 0 - m_PVRSampling: 1 - m_PVRDirectSampleCount: 32 - m_PVRSampleCount: 500 - m_PVRBounces: 2 - m_PVRFilterTypeDirect: 0 - m_PVRFilterTypeIndirect: 0 - m_PVRFilterTypeAO: 0 - m_PVRFilteringMode: 1 - m_PVRCulling: 1 - m_PVRFilteringGaussRadiusDirect: 1 - m_PVRFilteringGaussRadiusIndirect: 5 - m_PVRFilteringGaussRadiusAO: 2 - m_PVRFilteringAtrousPositionSigmaDirect: 0.5 - m_PVRFilteringAtrousPositionSigmaIndirect: 2 - m_PVRFilteringAtrousPositionSigmaAO: 1 - m_ShowResolutionOverlay: 1 - m_LightingDataAsset: {fileID: 112000002, guid: 03723c7f910c3423aa1974f1b9ce8392, - type: 2} - m_UseShadowmask: 1 ---- !u!196 &4 -NavMeshSettings: - serializedVersion: 2 - m_ObjectHideFlags: 0 - m_BuildSettings: - serializedVersion: 2 - agentTypeID: 0 - agentRadius: 0.5 - agentHeight: 2 - agentSlope: 45 - agentClimb: 0.4 - ledgeDropHeight: 0 - maxJumpAcrossDistance: 0 - minRegionArea: 2 - manualCellSize: 0 - cellSize: 0.16666667 - manualTileSize: 0 - tileSize: 256 - accuratePlacement: 0 - debug: - m_Flags: 0 - m_NavMeshData: {fileID: 0} ---- !u!1001 &95311345 -Prefab: - m_ObjectHideFlags: 0 - serializedVersion: 2 - m_Modification: - m_TransformParent: {fileID: 0} - m_Modifications: - - target: {fileID: 4935900384141556, guid: ce3434d96b87c40c6a765e6d78da40cd, type: 2} - propertyPath: m_LocalPosition.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4935900384141556, guid: ce3434d96b87c40c6a765e6d78da40cd, type: 2} - propertyPath: m_LocalPosition.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4935900384141556, guid: ce3434d96b87c40c6a765e6d78da40cd, type: 2} - propertyPath: m_LocalPosition.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4935900384141556, guid: ce3434d96b87c40c6a765e6d78da40cd, type: 2} - propertyPath: m_LocalRotation.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4935900384141556, guid: ce3434d96b87c40c6a765e6d78da40cd, type: 2} - propertyPath: m_LocalRotation.y - value: 0 - 
objectReference: {fileID: 0} - - target: {fileID: 4935900384141556, guid: ce3434d96b87c40c6a765e6d78da40cd, type: 2} - propertyPath: m_LocalRotation.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4935900384141556, guid: ce3434d96b87c40c6a765e6d78da40cd, type: 2} - propertyPath: m_LocalRotation.w - value: 1 - objectReference: {fileID: 0} - - target: {fileID: 4935900384141556, guid: ce3434d96b87c40c6a765e6d78da40cd, type: 2} - propertyPath: m_RootOrder - value: 6 - objectReference: {fileID: 0} - m_RemovedComponents: [] - m_ParentPrefab: {fileID: 100100000, guid: ce3434d96b87c40c6a765e6d78da40cd, type: 2} - m_IsPrefabParent: 0 ---- !u!1001 &121098826 -Prefab: - m_ObjectHideFlags: 0 - serializedVersion: 2 - m_Modification: - m_TransformParent: {fileID: 0} - m_Modifications: - - target: {fileID: 4533847489817540, guid: 344c35f795e0c4641991cc5b96547e6d, type: 2} - propertyPath: m_LocalPosition.x - value: 27 - objectReference: {fileID: 0} - - target: {fileID: 4533847489817540, guid: 344c35f795e0c4641991cc5b96547e6d, type: 2} - propertyPath: m_LocalPosition.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4533847489817540, guid: 344c35f795e0c4641991cc5b96547e6d, type: 2} - propertyPath: m_LocalPosition.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4533847489817540, guid: 344c35f795e0c4641991cc5b96547e6d, type: 2} - propertyPath: m_LocalRotation.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4533847489817540, guid: 344c35f795e0c4641991cc5b96547e6d, type: 2} - propertyPath: m_LocalRotation.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4533847489817540, guid: 344c35f795e0c4641991cc5b96547e6d, type: 2} - propertyPath: m_LocalRotation.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4533847489817540, guid: 344c35f795e0c4641991cc5b96547e6d, type: 2} - propertyPath: m_LocalRotation.w - value: 1 - objectReference: {fileID: 0} - - target: {fileID: 4533847489817540, guid: 
344c35f795e0c4641991cc5b96547e6d, type: 2} - propertyPath: m_RootOrder - value: 7 - objectReference: {fileID: 0} - m_RemovedComponents: [] - m_ParentPrefab: {fileID: 100100000, guid: 344c35f795e0c4641991cc5b96547e6d, type: 2} - m_IsPrefabParent: 0 ---- !u!1 &255077123 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 255077126} - - component: {fileID: 255077125} - - component: {fileID: 255077124} - m_Layer: 0 - m_Name: EventSystem - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!114 &255077124 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 255077123} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 1077351063, guid: f70555f144d8491a825f0804e09c671c, type: 3} - m_Name: - m_EditorClassIdentifier: - m_HorizontalAxis: Horizontal - m_VerticalAxis: Vertical - m_SubmitButton: Submit - m_CancelButton: Cancel - m_InputActionsPerSecond: 10 - m_RepeatDelay: 0.5 - m_ForceModuleActive: 0 ---- !u!114 &255077125 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 255077123} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: -619905303, guid: f70555f144d8491a825f0804e09c671c, type: 3} - m_Name: - m_EditorClassIdentifier: - m_FirstSelected: {fileID: 0} - m_sendNavigationEvents: 1 - m_DragThreshold: 5 ---- !u!4 &255077126 -Transform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 255077123} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 0} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &318490716 -GameObject: - 
m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 318490717} - m_Layer: 0 - m_Name: UnityEngine-Recorder - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &318490717 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 318490716} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 365376271} - - {fileID: 1265651286} - m_Father: {fileID: 0} - m_RootOrder: 4 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &365376270 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 365376271} - m_Layer: 0 - m_Name: Settings - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &365376271 -Transform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 365376270} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 1257687049} - m_Father: {fileID: 318490717} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1001 &631219891 -Prefab: - m_ObjectHideFlags: 0 - serializedVersion: 2 - m_Modification: - m_TransformParent: {fileID: 0} - m_Modifications: - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalPosition.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalPosition.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 
224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalPosition.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalRotation.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalRotation.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalRotation.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalRotation.w - value: 1 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_RootOrder - value: 3 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_AnchoredPosition.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_AnchoredPosition.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_SizeDelta.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_SizeDelta.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_AnchorMin.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_AnchorMin.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 
224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_AnchorMax.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_AnchorMax.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_Pivot.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_Pivot.y - value: 0 - objectReference: {fileID: 0} - m_RemovedComponents: [] - m_ParentPrefab: {fileID: 100100000, guid: 3ce107b4a79bc4eef83afde434932a68, type: 2} - m_IsPrefabParent: 0 ---- !u!1 &1257687048 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 1257687049} - m_Layer: 0 - m_Name: 50bfc0f4c3d6f46df98d3c66ceb89209 - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &1257687049 -Transform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1257687048} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 365376271} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &1265651285 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 1265651286} - m_Layer: 0 - m_Name: RecordingSessions - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &1265651286 -Transform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - 
m_GameObject: {fileID: 1265651285} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 318490717} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &1319872499 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 1319872503} - - component: {fileID: 1319872502} - m_Layer: 0 - m_Name: Camera - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!20 &1319872502 -Camera: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1319872499} - m_Enabled: 1 - serializedVersion: 2 - m_ClearFlags: 1 - m_BackGroundColor: {r: 0.46666667, g: 0.5647059, b: 0.60784316, a: 1} - m_NormalizedViewPortRect: - serializedVersion: 2 - x: 0 - y: 0 - width: 1 - height: 1 - near clip plane: 0.3 - far clip plane: 1000 - field of view: 60 - orthographic: 1 - orthographic size: 27.3 - m_Depth: 0 - m_CullingMask: - serializedVersion: 2 - m_Bits: 4294967295 - m_RenderingPath: -1 - m_TargetTexture: {fileID: 0} - m_TargetDisplay: 0 - m_TargetEye: 3 - m_HDR: 1 - m_AllowMSAA: 1 - m_AllowDynamicResolution: 0 - m_ForceIntoRT: 0 - m_OcclusionCulling: 1 - m_StereoConvergence: 10 - m_StereoSeparation: 0.022 ---- !u!4 &1319872503 -Transform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1319872499} - m_LocalRotation: {x: 0.35355338, y: -0.35355338, z: 0.1464466, w: 0.8535535} - m_LocalPosition: {x: 33, y: 30, z: -18.5} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 0} - m_RootOrder: 5 - m_LocalEulerAnglesHint: {x: 45, y: -45, z: 0} ---- !u!1 &1574236047 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - 
serializedVersion: 5 - m_Component: - - component: {fileID: 1574236049} - - component: {fileID: 1574236048} - m_Layer: 0 - m_Name: Academy - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!114 &1574236048 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1574236047} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 40db664a3061b46a0a0628f90b2264f7, type: 3} - m_Name: - m_EditorClassIdentifier: - broadcastHub: - broadcastingBrains: - - {fileID: 11400000, guid: 533f2edd327794ca996d0320901b501c, type: 2} - - {fileID: 11400000, guid: 51f870f0190b643adae5432c0e6205e7, type: 2} - m_BrainsToControl: [] - m_MaxSteps: 0 - m_TrainingConfiguration: - width: 128 - height: 128 - qualityLevel: 0 - timeScale: 20 - targetFrameRate: -1 - m_InferenceConfiguration: - width: 1280 - height: 720 - qualityLevel: 5 - timeScale: 1 - targetFrameRate: 60 - resetParameters: - m_ResetParameters: [] - agentRunSpeed: 1.5 - agentRotationSpeed: 1 - goalScoredMaterial: {fileID: 2100000, guid: df32cc593804f42df97464dc455057b8, type: 2} - failMaterial: {fileID: 2100000, guid: a1daf31cdf41e484ca9ac33a5c6f524a, type: 2} - gravityMultiplier: 2 ---- !u!4 &1574236049 -Transform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1574236047} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 0} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1001 &1745169174 -Prefab: - m_ObjectHideFlags: 0 - serializedVersion: 2 - m_Modification: - m_TransformParent: {fileID: 0} - m_Modifications: - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalPosition.x - value: 106.38621 - objectReference: {fileID: 0} 
- - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalPosition.y - value: 38.840767 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalPosition.z - value: 34.72934 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalRotation.x - value: 0.18587677 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalRotation.y - value: -0.7888064 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalRotation.z - value: 0.28710198 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalRotation.w - value: 0.51069236 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_RootOrder - value: 2 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalEulerAnglesHint.y - value: -114.16 - objectReference: {fileID: 0} - m_RemovedComponents: [] - m_ParentPrefab: {fileID: 100100000, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - m_IsPrefabParent: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Scenes/HallwayIL.unity.meta b/UnitySDK/Assets/ML-Agents/Examples/Hallway/Scenes/HallwayIL.unity.meta deleted file mode 100644 index 570851967c..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Scenes/HallwayIL.unity.meta +++ /dev/null @@ -1,7 +0,0 @@ -fileFormatVersion: 2 -guid: 5be1eb3996f96423d81321a4ca613466 -DefaultImporter: - externalObjects: {} - userData: - assetBundleName: - assetBundleVariant: diff --git 
a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Scenes/VisualHallway.unity b/UnitySDK/Assets/ML-Agents/Examples/Hallway/Scenes/VisualHallway.unity index 5c2b1ef611..65cdbdf077 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Scenes/VisualHallway.unity +++ b/UnitySDK/Assets/ML-Agents/Examples/Hallway/Scenes/VisualHallway.unity @@ -167,9 +167,19 @@ Prefab: value: objectReference: {fileID: 11400000, guid: fe56dd72ed38a4c2fb5419aba1e2d5f2, type: 2} + - target: {fileID: 114516857402348526, guid: f2281a3adc3e640b490f89407c2e12d1, + type: 2} + propertyPath: camera + value: + objectReference: {fileID: 32612447} m_RemovedComponents: [] m_ParentPrefab: {fileID: 100100000, guid: f2281a3adc3e640b490f89407c2e12d1, type: 2} m_IsPrefabParent: 0 +--- !u!20 &32612447 stripped +Camera: + m_PrefabParentObject: {fileID: 20309822448307506, guid: f2281a3adc3e640b490f89407c2e12d1, + type: 2} + m_PrefabInternal: {fileID: 32612446} --- !u!1 &255077123 GameObject: m_ObjectHideFlags: 0 @@ -315,9 +325,19 @@ Prefab: value: objectReference: {fileID: 11400000, guid: fe56dd72ed38a4c2fb5419aba1e2d5f2, type: 2} + - target: {fileID: 114516857402348526, guid: f2281a3adc3e640b490f89407c2e12d1, + type: 2} + propertyPath: camera + value: + objectReference: {fileID: 341018563} m_RemovedComponents: [] m_ParentPrefab: {fileID: 100100000, guid: f2281a3adc3e640b490f89407c2e12d1, type: 2} m_IsPrefabParent: 0 +--- !u!20 &341018563 stripped +Camera: + m_PrefabParentObject: {fileID: 20309822448307506, guid: f2281a3adc3e640b490f89407c2e12d1, + type: 2} + m_PrefabInternal: {fileID: 341018562} --- !u!1 &365376270 GameObject: m_ObjectHideFlags: 0 @@ -400,9 +420,19 @@ Prefab: value: objectReference: {fileID: 11400000, guid: fe56dd72ed38a4c2fb5419aba1e2d5f2, type: 2} + - target: {fileID: 114516857402348526, guid: f2281a3adc3e640b490f89407c2e12d1, + type: 2} + propertyPath: camera + value: + objectReference: {fileID: 721234460} m_RemovedComponents: [] m_ParentPrefab: {fileID: 100100000, guid: 
f2281a3adc3e640b490f89407c2e12d1, type: 2} m_IsPrefabParent: 0 +--- !u!20 &721234460 stripped +Camera: + m_PrefabParentObject: {fileID: 20309822448307506, guid: f2281a3adc3e640b490f89407c2e12d1, + type: 2} + m_PrefabInternal: {fileID: 721234459} --- !u!1001 &977960505 Prefab: m_ObjectHideFlags: 0 @@ -456,9 +486,19 @@ Prefab: value: objectReference: {fileID: 11400000, guid: fe56dd72ed38a4c2fb5419aba1e2d5f2, type: 2} + - target: {fileID: 114516857402348526, guid: f2281a3adc3e640b490f89407c2e12d1, + type: 2} + propertyPath: camera + value: + objectReference: {fileID: 977960506} m_RemovedComponents: [] m_ParentPrefab: {fileID: 100100000, guid: f2281a3adc3e640b490f89407c2e12d1, type: 2} m_IsPrefabParent: 0 +--- !u!20 &977960506 stripped +Camera: + m_PrefabParentObject: {fileID: 20309822448307506, guid: f2281a3adc3e640b490f89407c2e12d1, + type: 2} + m_PrefabInternal: {fileID: 977960505} --- !u!1 &1257687048 GameObject: m_ObjectHideFlags: 0 @@ -661,12 +701,6 @@ Prefab: propertyPath: m_RootOrder value: 6 objectReference: {fileID: 0} - - target: {fileID: 114451776683649118, guid: f2281a3adc3e640b490f89407c2e12d1, - type: 2} - propertyPath: brain - value: - objectReference: {fileID: 11400000, guid: fe56dd72ed38a4c2fb5419aba1e2d5f2, - type: 2} m_RemovedComponents: [] m_ParentPrefab: {fileID: 100100000, guid: f2281a3adc3e640b490f89407c2e12d1, type: 2} m_IsPrefabParent: 0 @@ -723,9 +757,19 @@ Prefab: value: objectReference: {fileID: 11400000, guid: fe56dd72ed38a4c2fb5419aba1e2d5f2, type: 2} + - target: {fileID: 114516857402348526, guid: f2281a3adc3e640b490f89407c2e12d1, + type: 2} + propertyPath: camera + value: + objectReference: {fileID: 1388008249} m_RemovedComponents: [] m_ParentPrefab: {fileID: 100100000, guid: f2281a3adc3e640b490f89407c2e12d1, type: 2} m_IsPrefabParent: 0 +--- !u!20 &1388008249 stripped +Camera: + m_PrefabParentObject: {fileID: 20309822448307506, guid: f2281a3adc3e640b490f89407c2e12d1, + type: 2} + m_PrefabInternal: {fileID: 1388008248} --- !u!1001 
&1436760868 Prefab: m_ObjectHideFlags: 0 @@ -779,9 +823,19 @@ Prefab: value: objectReference: {fileID: 11400000, guid: fe56dd72ed38a4c2fb5419aba1e2d5f2, type: 2} + - target: {fileID: 114516857402348526, guid: f2281a3adc3e640b490f89407c2e12d1, + type: 2} + propertyPath: camera + value: + objectReference: {fileID: 1436760869} m_RemovedComponents: [] m_ParentPrefab: {fileID: 100100000, guid: f2281a3adc3e640b490f89407c2e12d1, type: 2} m_IsPrefabParent: 0 +--- !u!20 &1436760869 stripped +Camera: + m_PrefabParentObject: {fileID: 20309822448307506, guid: f2281a3adc3e640b490f89407c2e12d1, + type: 2} + m_PrefabInternal: {fileID: 1436760868} --- !u!1 &1574236047 GameObject: m_ObjectHideFlags: 0 @@ -809,10 +863,6 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 40db664a3061b46a0a0628f90b2264f7, type: 3} m_Name: m_EditorClassIdentifier: - broadcastHub: - broadcastingBrains: - - {fileID: 11400000, guid: fe56dd72ed38a4c2fb5419aba1e2d5f2, type: 2} - m_BrainsToControl: [] m_TrainingConfiguration: width: 128 height: 128 @@ -898,9 +948,19 @@ Prefab: value: objectReference: {fileID: 11400000, guid: fe56dd72ed38a4c2fb5419aba1e2d5f2, type: 2} + - target: {fileID: 114516857402348526, guid: f2281a3adc3e640b490f89407c2e12d1, + type: 2} + propertyPath: camera + value: + objectReference: {fileID: 1746153507} m_RemovedComponents: [] m_ParentPrefab: {fileID: 100100000, guid: f2281a3adc3e640b490f89407c2e12d1, type: 2} m_IsPrefabParent: 0 +--- !u!20 &1746153507 stripped +Camera: + m_PrefabParentObject: {fileID: 20309822448307506, guid: f2281a3adc3e640b490f89407c2e12d1, + type: 2} + m_PrefabInternal: {fileID: 1746153506} --- !u!1001 &2025898844 Prefab: m_ObjectHideFlags: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Scripts/HallwayAgent.cs b/UnitySDK/Assets/ML-Agents/Examples/Hallway/Scripts/HallwayAgent.cs index 7da758ce85..5912ce2521 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/Hallway/Scripts/HallwayAgent.cs +++ 
b/UnitySDK/Assets/ML-Agents/Examples/Hallway/Scripts/HallwayAgent.cs @@ -52,29 +52,21 @@ public void MoveAgent(float[] act) var dirToGo = Vector3.zero; var rotateDir = Vector3.zero; - if (brain.brainParameters.vectorActionSpaceType == SpaceType.Continuous) + var action = Mathf.FloorToInt(act[0]); + switch (action) { - dirToGo = transform.forward * Mathf.Clamp(act[0], -1f, 1f); - rotateDir = transform.up * Mathf.Clamp(act[1], -1f, 1f); - } - else - { - var action = Mathf.FloorToInt(act[0]); - switch (action) - { - case 1: - dirToGo = transform.forward * 1f; - break; - case 2: - dirToGo = transform.forward * -1f; - break; - case 3: - rotateDir = transform.up * 1f; - break; - case 4: - rotateDir = transform.up * -1f; - break; - } + case 1: + dirToGo = transform.forward * 1f; + break; + case 2: + dirToGo = transform.forward * -1f; + break; + case 3: + rotateDir = transform.up * 1f; + break; + case 4: + rotateDir = transform.up * -1f; + break; } transform.Rotate(rotateDir, Time.deltaTime * 150f); m_AgentRb.AddForce(dirToGo * m_Academy.agentRunSpeed, ForceMode.VelocityChange); @@ -105,6 +97,27 @@ void OnCollisionEnter(Collision col) } } + public override float[] Heuristic() + { + if (Input.GetKey(KeyCode.D)) + { + return new float[] { 3 }; + } + if (Input.GetKey(KeyCode.W)) + { + return new float[] { 1 }; + } + if (Input.GetKey(KeyCode.A)) + { + return new float[] { 4 }; + } + if (Input.GetKey(KeyCode.S)) + { + return new float[] { 2 }; + } + return new float[] { 0 }; + } + public override void AgentReset() { var agentOffset = -15f; diff --git a/UnitySDK/Assets/ML-Agents/Examples/Hallway/TFModels/Hallway.nn b/UnitySDK/Assets/ML-Agents/Examples/Hallway/TFModels/Hallway.nn new file mode 100644 index 0000000000..c80aaa91b2 Binary files /dev/null and b/UnitySDK/Assets/ML-Agents/Examples/Hallway/TFModels/Hallway.nn differ diff --git a/UnitySDK/Assets/ML-Agents/Examples/Hallway/TFModels/Hallway.nn.meta b/UnitySDK/Assets/ML-Agents/Examples/Hallway/TFModels/Hallway.nn.meta new 
file mode 100644 index 0000000000..400a019729 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Examples/Hallway/TFModels/Hallway.nn.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: 317f4f8da7e4846b3aae0969781824a2 +ScriptedImporter: + userData: + assetBundleName: + assetBundleVariant: + script: {fileID: 11500000, guid: 19ed1486aa27d4903b34839f37b8f69f, type: 3} diff --git a/UnitySDK/Assets/ML-Agents/Examples/Hallway/TFModels/HallwayLearning.nn b/UnitySDK/Assets/ML-Agents/Examples/Hallway/TFModels/HallwayLearning.nn deleted file mode 100644 index 23ea9b4619..0000000000 Binary files a/UnitySDK/Assets/ML-Agents/Examples/Hallway/TFModels/HallwayLearning.nn and /dev/null differ diff --git a/UnitySDK/Assets/ML-Agents/Examples/Hallway/TFModels/HallwayLearning.nn.meta b/UnitySDK/Assets/ML-Agents/Examples/Hallway/TFModels/HallwayLearning.nn.meta deleted file mode 100644 index 6a59ffed96..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Hallway/TFModels/HallwayLearning.nn.meta +++ /dev/null @@ -1,7 +0,0 @@ -fileFormatVersion: 2 -guid: b3f3b601fa5e84185862261041525ea9 -ScriptedImporter: - userData: - assetBundleName: - assetBundleVariant: - script: {fileID: 11500000, guid: 19ed1486aa27d4903b34839f37b8f69f, type: 3} diff --git a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Brains.meta b/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Brains.meta deleted file mode 100644 index 99f461b54f..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Brains.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: cd9aa4a8132f543f79360f7342092928 -folderAsset: yes -DefaultImporter: - externalObjects: {} - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Brains/PushBlockLearning.asset b/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Brains/PushBlockLearning.asset deleted file mode 100644 index 06f488db17..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Brains/PushBlockLearning.asset +++ /dev/null @@ -1,23 
+0,0 @@ -%YAML 1.1 -%TAG !u! tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 8b23992c8eb17439887f5e944bf04a40, type: 3} - m_Name: PushBlockLearning - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 70 - numStackedVectorObservations: 3 - vectorActionSize: 07000000 - cameraResolutions: [] - vectorActionDescriptions: - - - vectorActionSpaceType: 0 - model: {fileID: 11400000, guid: c60a63ad5dc0c4a029d7360054667457, type: 3} - inferenceDevice: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Brains/PushBlockLearning.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Brains/PushBlockLearning.asset.meta deleted file mode 100644 index 3fe50d7b2d..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Brains/PushBlockLearning.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: e8b2d719f6a324b1abb68d8cf2859f5c -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Brains/PushBlockPlayer.asset b/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Brains/PushBlockPlayer.asset deleted file mode 100644 index 656a4fa28f..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Brains/PushBlockPlayer.asset +++ /dev/null @@ -1,36 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 41e9bda8f3cf1492fa74926a530f6f70, type: 3} - m_Name: PushBlockPlayer - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 70 - numStackedVectorObservations: 3 - vectorActionSize: 07000000 - cameraResolutions: [] - vectorActionDescriptions: - - - vectorActionSpaceType: 0 - keyContinuousPlayerActions: [] - axisContinuousPlayerActions: [] - discretePlayerActions: - - key: 119 - branchIndex: 0 - value: 1 - - key: 115 - branchIndex: 0 - value: 2 - - key: 100 - branchIndex: 0 - value: 3 - - key: 97 - branchIndex: 0 - value: 4 diff --git a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Brains/PushBlockPlayer.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Brains/PushBlockPlayer.asset.meta deleted file mode 100644 index 8700c6334d..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Brains/PushBlockPlayer.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: dd07b1953eac4411b81fba032f394726 -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Brains/VisualPushBlockLearning.asset b/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Brains/VisualPushBlockLearning.asset deleted file mode 100644 index 2bfe7a12ab..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Brains/VisualPushBlockLearning.asset +++ /dev/null @@ -1,26 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 8b23992c8eb17439887f5e944bf04a40, type: 3} - m_Name: VisualPushBlockLearning - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 0 - numStackedVectorObservations: 1 - vectorActionSize: 07000000 - cameraResolutions: - - width: 84 - height: 84 - blackAndWhite: 0 - vectorActionDescriptions: - - - vectorActionSpaceType: 0 - model: {fileID: 0} - inferenceDevice: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Brains/VisualPushBlockLearning.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Brains/VisualPushBlockLearning.asset.meta deleted file mode 100644 index 396c4d0a90..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Brains/VisualPushBlockLearning.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: d359d2290a825421e930c94284994e3f -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Brains/VisualPushBlockPlayer.asset b/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Brains/VisualPushBlockPlayer.asset deleted file mode 100644 index 916721bceb..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Brains/VisualPushBlockPlayer.asset +++ /dev/null @@ -1,45 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 41e9bda8f3cf1492fa74926a530f6f70, type: 3} - m_Name: VisualPushBlockPlayer - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 0 - numStackedVectorObservations: 1 - vectorActionSize: 07000000 - cameraResolutions: - - width: 84 - height: 84 - blackAndWhite: 0 - vectorActionDescriptions: - - - vectorActionSpaceType: 0 - keyContinuousPlayerActions: [] - axisContinuousPlayerActions: [] - discretePlayerActions: - - key: 97 - branchIndex: 0 - value: 4 - - key: 100 - branchIndex: 0 - value: 3 - - key: 119 - branchIndex: 0 - value: 1 - - key: 115 - branchIndex: 0 - value: 2 - - key: 113 - branchIndex: 0 - value: 5 - - key: 101 - branchIndex: 0 - value: 6 diff --git a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Brains/VisualPushBlockPlayer.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Brains/VisualPushBlockPlayer.asset.meta deleted file mode 100644 index 79a79a2a0e..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Brains/VisualPushBlockPlayer.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: cc62140bff6494e0399caaed0b56020d -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Prefabs/PushBlockArea.prefab b/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Prefabs/PushBlockArea.prefab index b371c6c3fb..fde9a4ad4c 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Prefabs/PushBlockArea.prefab +++ b/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Prefabs/PushBlockArea.prefab @@ -104,6 +104,7 @@ GameObject: m_Component: - component: {fileID: 4188187884171146} - component: {fileID: 54817351390947638} + - component: {fileID: 
114306175693660464} - component: {fileID: 114505490781873732} - component: {fileID: 114421647563711602} - component: {fileID: 65880096262939968} @@ -933,6 +934,27 @@ MonoBehaviour: m_Name: m_EditorClassIdentifier: agent: {fileID: 0} +--- !u!114 &114306175693660464 +MonoBehaviour: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1489716781518988} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} + m_Name: + m_EditorClassIdentifier: + m_BrainParameters: + vectorObservationSize: 70 + numStackedVectorObservations: 3 + vectorActionSize: 07000000 + vectorActionDescriptions: [] + vectorActionSpaceType: 0 + m_Model: {fileID: 11400000, guid: 70db47ab276e44fe0beb677ff8d69382, type: 3} + m_InferenceDevice: 0 + m_UseHeuristic: 0 + m_BehaviorName: PushBlock --- !u!114 &114421647563711602 MonoBehaviour: m_ObjectHideFlags: 1 @@ -955,10 +977,7 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: dea8c4f2604b947e6b7b97750dde87ca, type: 3} m_Name: m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: e8b2d719f6a324b1abb68d8cf2859f5c, type: 2} agentParameters: - agentCameras: [] - agentRenderTextures: [] maxStep: 5000 resetOnDone: 1 onDemandDecision: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Prefabs/PushBlockStudentArea.prefab b/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Prefabs/PushBlockStudentArea.prefab deleted file mode 100644 index ba4e0b6f53..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Prefabs/PushBlockStudentArea.prefab +++ /dev/null @@ -1,974 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!1001 &100100000 -Prefab: - m_ObjectHideFlags: 1 - serializedVersion: 2 - m_Modification: - m_TransformParent: {fileID: 0} - m_Modifications: [] - m_RemovedComponents: [] - m_ParentPrefab: {fileID: 0} - m_RootGameObject: {fileID: 1486138403211918} - m_IsPrefabParent: 1 ---- !u!1 &1085869478616300 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4030662671131900} - - component: {fileID: 33087979356008778} - - component: {fileID: 23408361250061780} - - component: {fileID: 65712289772841848} - m_Layer: 0 - m_Name: Ground - m_TagString: ground - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1118257762531670 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4443565957894596} - - component: {fileID: 33483016810675864} - - component: {fileID: 23831584096036484} - m_Layer: 0 - m_Name: mouth - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1174612108545256 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4280309931594182} - - component: {fileID: 54034229665735626} - - component: {fileID: 114529729461792126} - - component: {fileID: 114818793969690340} - - component: {fileID: 65153386324815056} - m_Layer: 0 - m_Name: Agent - m_TagString: agent - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1415827604328980 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4592429442057442} - - component: {fileID: 
33840372451923300} - - component: {fileID: 23465817053254436} - m_Layer: 0 - m_Name: eye - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1479034566851370 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4009006697408198} - - component: {fileID: 33076284816695044} - - component: {fileID: 23583729267583096} - - component: {fileID: 65770352760645874} - - component: {fileID: 65570048190793940} - - component: {fileID: 65480836019121548} - - component: {fileID: 65103478791235036} - m_Layer: 0 - m_Name: WallsOuter - m_TagString: wall - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1486138403211918 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4533671617941172} - m_Layer: 0 - m_Name: PushBlockStudentArea - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1515121274837440 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4911215992745486} - - component: {fileID: 33759457649512042} - - component: {fileID: 23085929993717832} - m_Layer: 0 - m_Name: AgentCube_Blue - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1546837743142234 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4124049975477908} - - component: {fileID: 33985978872265220} - - component: {fileID: 23822664244603958} - m_Layer: 0 - m_Name: eye - m_TagString: Untagged - m_Icon: 
{fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1574356009921272 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4430649836565100} - - component: {fileID: 33140600210072610} - - component: {fileID: 23582222220945790} - m_Layer: 0 - m_Name: Headband - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1851985311718410 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4325794013734564} - - component: {fileID: 20080230428057848} - m_Layer: 0 - m_Name: AgentCamera - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 0 ---- !u!1 &1990879622835084 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4237244800517280} - - component: {fileID: 33195280502948298} - - component: {fileID: 23464703671882344} - - component: {fileID: 65995918856407652} - m_Layer: 0 - m_Name: Goal - m_TagString: goal - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!1 &1999560460119064 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 4372775582892282} - - component: {fileID: 33801746971264330} - - component: {fileID: 65560130982147748} - - component: {fileID: 23173975500586872} - - component: {fileID: 54111943613874968} - - component: {fileID: 114785351807279256} - m_Layer: 0 - m_Name: Block - m_TagString: block - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &4009006697408198 
-Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1479034566851370} - m_LocalRotation: {x: 0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: -0, y: 0, z: 0} - m_LocalScale: {x: 0.01, y: 0.01, z: 0.01} - m_Children: [] - m_Father: {fileID: 4533671617941172} - m_RootOrder: 3 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4030662671131900 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1085869478616300} - m_LocalRotation: {x: 0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: -0, y: 0, z: -0.00000030517577} - m_LocalScale: {x: 0.01, y: 0.01, z: 0.01} - m_Children: [] - m_Father: {fileID: 4533671617941172} - m_RootOrder: 2 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4124049975477908 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1546837743142234} - m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} - m_LocalPosition: {x: -0.29999995, y: 0.07399994, z: 0.50040054} - m_LocalScale: {x: 0.29457998, y: 0.29457998, z: 0.29457998} - m_Children: [] - m_Father: {fileID: 4911215992745486} - m_RootOrder: 2 - m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4237244800517280 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1990879622835084} - m_LocalRotation: {x: 0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: -0, y: -0.03, z: -10.5} - m_LocalScale: {x: 0.01, y: 0.01, z: 0.010748733} - m_Children: [] - m_Father: {fileID: 4533671617941172} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4280309931594182 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1174612108545256} - m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} - 
m_LocalPosition: {x: 0, y: 1, z: 3} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 4911215992745486} - m_Father: {fileID: 4533671617941172} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4325794013734564 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1851985311718410} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0.15} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 4911215992745486} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4372775582892282 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1999560460119064} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 2, y: 1, z: -1.43} - m_LocalScale: {x: 2, y: 0.75, z: 2} - m_Children: [] - m_Father: {fileID: 4533671617941172} - m_RootOrder: 4 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4430649836565100 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1574356009921272} - m_LocalRotation: {x: -0, y: -0, z: 0.016506119, w: 0.9998638} - m_LocalPosition: {x: 0, y: 0.341, z: 0} - m_LocalScale: {x: 1.0441425, y: 0.19278127, z: 1.0441422} - m_Children: [] - m_Father: {fileID: 4911215992745486} - m_RootOrder: 4 - m_LocalEulerAnglesHint: {x: 0, y: -179.99998, z: 1.8920001} ---- !u!4 &4443565957894596 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1118257762531670} - m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} - m_LocalPosition: {x: 0, y: -0.18299997, z: 0.50040054} - m_LocalScale: {x: 0.27602, y: 0.042489994, z: 0.13891} - m_Children: [] - m_Father: {fileID: 4911215992745486} - m_RootOrder: 3 - m_LocalEulerAnglesHint: {x: 0, 
y: 180, z: 0} ---- !u!4 &4533671617941172 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1486138403211918} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 15, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 4280309931594182} - - {fileID: 4237244800517280} - - {fileID: 4030662671131900} - - {fileID: 4009006697408198} - - {fileID: 4372775582892282} - m_Father: {fileID: 0} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!4 &4592429442057442 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1415827604328980} - m_LocalRotation: {x: -0, y: 1, z: -0, w: 0} - m_LocalPosition: {x: 0.29999995, y: 0.07399994, z: 0.50040054} - m_LocalScale: {x: 0.29457998, y: 0.29457998, z: 0.29457998} - m_Children: [] - m_Father: {fileID: 4911215992745486} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 180, z: 0} ---- !u!4 &4911215992745486 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1515121274837440} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 4325794013734564} - - {fileID: 4592429442057442} - - {fileID: 4124049975477908} - - {fileID: 4443565957894596} - - {fileID: 4430649836565100} - m_Father: {fileID: 4280309931594182} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!20 &20080230428057848 -Camera: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1851985311718410} - m_Enabled: 1 - serializedVersion: 2 - m_ClearFlags: 2 - m_BackGroundColor: {r: 0.46666667, g: 0.5647059, b: 0.60784316, a: 1} - m_NormalizedViewPortRect: - serializedVersion: 2 - x: 0 - y: 0 - width: 
1 - height: 1 - near clip plane: 0.3 - far clip plane: 1000 - field of view: 60 - orthographic: 0 - orthographic size: 5 - m_Depth: 0 - m_CullingMask: - serializedVersion: 2 - m_Bits: 4294950911 - m_RenderingPath: -1 - m_TargetTexture: {fileID: 0} - m_TargetDisplay: 0 - m_TargetEye: 3 - m_HDR: 1 - m_AllowMSAA: 1 - m_AllowDynamicResolution: 0 - m_ForceIntoRT: 0 - m_OcclusionCulling: 1 - m_StereoConvergence: 10 - m_StereoSeparation: 0.022 ---- !u!23 &23085929993717832 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1515121274837440} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: c9fa44c2c3f8ce74ca39a3355ea42631, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23173975500586872 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1999560460119064} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: eaad04b0e0dec42229c9cb00a981d7ac, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - 
m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23408361250061780 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1085869478616300} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: acba6bf2a290a496bb8989b42bf8698d, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23464703671882344 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1990879622835084} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: c67450f290f3e4897bc40276a619e78d, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - 
m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23465817053254436 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1415827604328980} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: f731be6866ce749fd8349e67ae81f76a, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23582222220945790 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1574356009921272} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 04be259c590de46f69db4cbd1da877d5, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23583729267583096 -MeshRenderer: - 
m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1479034566851370} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: 66163cf35956a4be08e801b750c26f33, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 0 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23822664244603958 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1546837743142234} - m_Enabled: 1 - m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: f731be6866ce749fd8349e67ae81f76a, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!23 &23831584096036484 -MeshRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1118257762531670} - m_Enabled: 1 - 
m_CastShadows: 1 - m_ReceiveShadows: 1 - m_DynamicOccludee: 1 - m_MotionVectors: 1 - m_LightProbeUsage: 1 - m_ReflectionProbeUsage: 1 - m_Materials: - - {fileID: 2100000, guid: f731be6866ce749fd8349e67ae81f76a, type: 2} - m_StaticBatchInfo: - firstSubMesh: 0 - subMeshCount: 0 - m_StaticBatchRoot: {fileID: 0} - m_ProbeAnchor: {fileID: 0} - m_LightProbeVolumeOverride: {fileID: 0} - m_ScaleInLightmap: 1 - m_PreserveUVs: 1 - m_IgnoreNormalsForChartDetection: 0 - m_ImportantGI: 0 - m_StitchLightmapSeams: 0 - m_SelectedEditorRenderState: 3 - m_MinimumChartSize: 4 - m_AutoUVMaxDistance: 0.5 - m_AutoUVMaxAngle: 89 - m_LightmapParameters: {fileID: 0} - m_SortingLayerID: 0 - m_SortingLayer: 0 - m_SortingOrder: 0 ---- !u!33 &33076284816695044 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1479034566851370} - m_Mesh: {fileID: 4300000, guid: c639386c12f5f7841892163a199dfacc, type: 3} ---- !u!33 &33087979356008778 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1085869478616300} - m_Mesh: {fileID: 4300002, guid: c639386c12f5f7841892163a199dfacc, type: 3} ---- !u!33 &33140600210072610 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1574356009921272} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33195280502948298 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1990879622835084} - m_Mesh: {fileID: 4300004, guid: c639386c12f5f7841892163a199dfacc, type: 3} ---- !u!33 &33483016810675864 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1118257762531670} - m_Mesh: {fileID: 10210, guid: 
0000000000000000e000000000000000, type: 0} ---- !u!33 &33759457649512042 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1515121274837440} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33801746971264330 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1999560460119064} - m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33840372451923300 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1415827604328980} - m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} ---- !u!33 &33985978872265220 -MeshFilter: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1546837743142234} - m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0} ---- !u!54 &54034229665735626 -Rigidbody: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1174612108545256} - serializedVersion: 2 - m_Mass: 10 - m_Drag: 4 - m_AngularDrag: 0.05 - m_UseGravity: 1 - m_IsKinematic: 0 - m_Interpolate: 0 - m_Constraints: 112 - m_CollisionDetection: 0 ---- !u!54 &54111943613874968 -Rigidbody: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1999560460119064} - serializedVersion: 2 - m_Mass: 10 - m_Drag: 0.5 - m_AngularDrag: 0.05 - m_UseGravity: 1 - m_IsKinematic: 0 - m_Interpolate: 0 - m_Constraints: 112 - m_CollisionDetection: 0 ---- !u!65 &65103478791235036 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1479034566851370} - m_Material: 
{fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 2500, y: 200, z: 50} - m_Center: {x: 0, y: 50, z: 1275} ---- !u!65 &65153386324815056 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1174612108545256} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65480836019121548 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1479034566851370} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 2500, y: 200, z: 50} - m_Center: {x: 0, y: 50, z: -1275} ---- !u!65 &65560130982147748 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1999560460119064} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 1, y: 1, z: 1} - m_Center: {x: 0, y: 0, z: 0} ---- !u!65 &65570048190793940 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1479034566851370} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 50, y: 200, z: 2600} - m_Center: {x: 1275, y: 50, z: 0} ---- !u!65 &65712289772841848 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1085869478616300} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 2500, y: 100, z: 2500} - m_Center: {x: 0, y: -50, z: 0} ---- !u!65 &65770352760645874 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1479034566851370} - 
m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 50, y: 200, z: 2600} - m_Center: {x: -1275, y: 50, z: 0} ---- !u!65 &65995918856407652 -BoxCollider: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1990879622835084} - m_Material: {fileID: 0} - m_IsTrigger: 0 - m_Enabled: 1 - serializedVersion: 2 - m_Size: {x: 2500, y: 5, z: 400} - m_Center: {x: 0, y: 2.5, z: 0} ---- !u!114 &114529729461792126 -MonoBehaviour: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1174612108545256} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: dea8c4f2604b947e6b7b97750dde87ca, type: 3} - m_Name: - m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: e8b2d719f6a324b1abb68d8cf2859f5c, type: 2} - agentParameters: - agentCameras: [] - agentRenderTextures: [] - maxStep: 5000 - resetOnDone: 1 - onDemandDecision: 0 - numberOfActionsBetweenDecisions: 5 - ground: {fileID: 0} - area: {fileID: 0} - areaBounds: - m_Center: {x: 0, y: 0, z: 0} - m_Extent: {x: 0, y: 0, z: 0} - goal: {fileID: 0} - block: {fileID: 0} - goalDetect: {fileID: 0} - useVectorObs: 1 ---- !u!114 &114785351807279256 -MonoBehaviour: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1999560460119064} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 7d079d09ceed84ff49cf6841c66cf7ec, type: 3} - m_Name: - m_EditorClassIdentifier: - agent: {fileID: 0} ---- !u!114 &114818793969690340 -MonoBehaviour: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1174612108545256} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: bb172294dbbcc408286b156a2c4b553c, type: 3} - m_Name: - m_EditorClassIdentifier: diff --git 
a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Prefabs/PushBlockStudentArea.prefab.meta b/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Prefabs/PushBlockStudentArea.prefab.meta deleted file mode 100644 index 29ed4fe324..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Prefabs/PushBlockStudentArea.prefab.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 0ccbba9c7befb48aabfcfb854dbdd852 -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 100100000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Prefabs/PushBlockTeacherArea.prefab.meta b/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Prefabs/PushBlockTeacherArea.prefab.meta deleted file mode 100644 index 9f6b397c8e..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Prefabs/PushBlockTeacherArea.prefab.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 576f6670c9af14b90bc8f418d0e6c94d -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 100100000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Prefabs/PushBlockVisualArea.prefab b/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Prefabs/PushBlockVisualArea.prefab index 77464bc02f..eeb904e1ad 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Prefabs/PushBlockVisualArea.prefab +++ b/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Prefabs/PushBlockVisualArea.prefab @@ -179,9 +179,10 @@ GameObject: m_Component: - component: {fileID: 4456685767774680} - component: {fileID: 54790445914364846} + - component: {fileID: 114923027571458262} - component: {fileID: 114812843792483960} - - component: {fileID: 114650520402303970} - component: {fileID: 65891831092422300} + - component: {fileID: 114505118440755634} m_Layer: 0 m_Name: Agent m_TagString: agent @@ -1209,7 +1210,7 @@ BoxCollider: serializedVersion: 2 m_Size: {x: 1, y: 1, z: 1} m_Center: {x: 0, y: 0, z: 0} ---- !u!114 
&114650520402303970 +--- !u!114 &114505118440755634 MonoBehaviour: m_ObjectHideFlags: 1 m_PrefabParentObject: {fileID: 0} @@ -1217,9 +1218,14 @@ MonoBehaviour: m_GameObject: {fileID: 1626010291821672} m_Enabled: 1 m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: bb172294dbbcc408286b156a2c4b553c, type: 3} + m_Script: {fileID: 11500000, guid: 282f342c2ab144bf38be65d4d0c4e07d, type: 3} m_Name: m_EditorClassIdentifier: + camera: {fileID: 20961401228419460} + sensorName: CameraSensor + width: 84 + height: 84 + grayscale: 0 --- !u!114 &114690277332619348 MonoBehaviour: m_ObjectHideFlags: 1 @@ -1243,11 +1249,7 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: dea8c4f2604b947e6b7b97750dde87ca, type: 3} m_Name: m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: cc62140bff6494e0399caaed0b56020d, type: 2} agentParameters: - agentCameras: - - {fileID: 20961401228419460} - agentRenderTextures: [] maxStep: 5000 resetOnDone: 1 onDemandDecision: 0 @@ -1261,3 +1263,24 @@ MonoBehaviour: block: {fileID: 1609037632005304} goalDetect: {fileID: 0} useVectorObs: 0 +--- !u!114 &114923027571458262 +MonoBehaviour: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1626010291821672} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} + m_Name: + m_EditorClassIdentifier: + m_BrainParameters: + vectorObservationSize: 0 + numStackedVectorObservations: 1 + vectorActionSize: 07000000 + vectorActionDescriptions: [] + vectorActionSpaceType: 0 + m_Model: {fileID: 0} + m_InferenceDevice: 0 + m_UseHeuristic: 0 + m_BehaviorName: VisualHallway diff --git a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Scenes/PushBlock.unity b/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Scenes/PushBlock.unity index 234ca4db7d..ea6069b825 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Scenes/PushBlock.unity +++ 
b/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Scenes/PushBlock.unity @@ -1672,10 +1672,8 @@ MonoBehaviour: m_Name: m_EditorClassIdentifier: broadcastHub: - broadcastingBrains: + brainsToControl: - {fileID: 11400000, guid: e8b2d719f6a324b1abb68d8cf2859f5c, type: 2} - m_BrainsToControl: [] - m_MaxSteps: 0 m_TrainingConfiguration: width: 400 height: 300 diff --git a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Scenes/PushBlockIL.unity b/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Scenes/PushBlockIL.unity deleted file mode 100644 index 6ee4cd3ecf..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Scenes/PushBlockIL.unity +++ /dev/null @@ -1,714 +0,0 @@ -%YAML 1.1 -%TAG !u! tag:unity3d.com,2011: ---- !u!29 &1 -OcclusionCullingSettings: - m_ObjectHideFlags: 0 - serializedVersion: 2 - m_OcclusionBakeSettings: - smallestOccluder: 5 - smallestHole: 0.25 - backfaceThreshold: 100 - m_SceneGUID: 00000000000000000000000000000000 - m_OcclusionCullingData: {fileID: 0} ---- !u!104 &2 -RenderSettings: - m_ObjectHideFlags: 0 - serializedVersion: 8 - m_Fog: 0 - m_FogColor: {r: 0.5, g: 0.5, b: 0.5, a: 1} - m_FogMode: 3 - m_FogDensity: 0.01 - m_LinearFogStart: 0 - m_LinearFogEnd: 300 - m_AmbientSkyColor: {r: 0.8, g: 0.8, b: 0.8, a: 1} - m_AmbientEquatorColor: {r: 0.114, g: 0.125, b: 0.133, a: 1} - m_AmbientGroundColor: {r: 0.047, g: 0.043, b: 0.035, a: 1} - m_AmbientIntensity: 1 - m_AmbientMode: 3 - m_SubtractiveShadowColor: {r: 0.42, g: 0.478, b: 0.627, a: 1} - m_SkyboxMaterial: {fileID: 0} - m_HaloStrength: 0.5 - m_FlareStrength: 1 - m_FlareFadeSpeed: 3 - m_HaloTexture: {fileID: 0} - m_SpotCookie: {fileID: 10001, guid: 0000000000000000e000000000000000, type: 0} - m_DefaultReflectionMode: 0 - m_DefaultReflectionResolution: 128 - m_ReflectionBounces: 1 - m_ReflectionIntensity: 1 - m_CustomReflection: {fileID: 0} - m_Sun: {fileID: 0} - m_IndirectSpecularColor: {r: 0, g: 0, b: 0, a: 1} ---- !u!157 &3 -LightmapSettings: - m_ObjectHideFlags: 0 - serializedVersion: 11 - 
m_GIWorkflowMode: 0 - m_GISettings: - serializedVersion: 2 - m_BounceScale: 1 - m_IndirectOutputScale: 1 - m_AlbedoBoost: 1 - m_TemporalCoherenceThreshold: 1 - m_EnvironmentLightingMode: 0 - m_EnableBakedLightmaps: 1 - m_EnableRealtimeLightmaps: 1 - m_LightmapEditorSettings: - serializedVersion: 9 - m_Resolution: 2 - m_BakeResolution: 40 - m_TextureWidth: 1024 - m_TextureHeight: 1024 - m_AO: 0 - m_AOMaxDistance: 1 - m_CompAOExponent: 1 - m_CompAOExponentDirect: 0 - m_Padding: 2 - m_LightmapParameters: {fileID: 0} - m_LightmapsBakeMode: 1 - m_TextureCompression: 1 - m_FinalGather: 0 - m_FinalGatherFiltering: 1 - m_FinalGatherRayCount: 256 - m_ReflectionCompression: 2 - m_MixedBakeMode: 2 - m_BakeBackend: 0 - m_PVRSampling: 1 - m_PVRDirectSampleCount: 32 - m_PVRSampleCount: 500 - m_PVRBounces: 2 - m_PVRFilterTypeDirect: 0 - m_PVRFilterTypeIndirect: 0 - m_PVRFilterTypeAO: 0 - m_PVRFilteringMode: 1 - m_PVRCulling: 1 - m_PVRFilteringGaussRadiusDirect: 1 - m_PVRFilteringGaussRadiusIndirect: 5 - m_PVRFilteringGaussRadiusAO: 2 - m_PVRFilteringAtrousPositionSigmaDirect: 0.5 - m_PVRFilteringAtrousPositionSigmaIndirect: 2 - m_PVRFilteringAtrousPositionSigmaAO: 1 - m_ShowResolutionOverlay: 1 - m_LightingDataAsset: {fileID: 112000002, guid: 03723c7f910c3423aa1974f1b9ce8392, - type: 2} - m_UseShadowmask: 1 ---- !u!196 &4 -NavMeshSettings: - serializedVersion: 2 - m_ObjectHideFlags: 0 - m_BuildSettings: - serializedVersion: 2 - agentTypeID: 0 - agentRadius: 0.5 - agentHeight: 2 - agentSlope: 45 - agentClimb: 0.4 - ledgeDropHeight: 0 - maxJumpAcrossDistance: 0 - minRegionArea: 2 - manualCellSize: 0 - cellSize: 0.16666667 - manualTileSize: 0 - tileSize: 256 - accuratePlacement: 0 - debug: - m_Flags: 0 - m_NavMeshData: {fileID: 0} ---- !u!1001 &27539394 -Prefab: - m_ObjectHideFlags: 0 - serializedVersion: 2 - m_Modification: - m_TransformParent: {fileID: 0} - m_Modifications: - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: 
m_LocalPosition.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalPosition.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalPosition.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalRotation.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalRotation.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalRotation.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalRotation.w - value: 1 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_RootOrder - value: 5 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_AnchoredPosition.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_AnchoredPosition.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_SizeDelta.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_SizeDelta.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - 
propertyPath: m_AnchorMin.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_AnchorMin.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_AnchorMax.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_AnchorMax.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_Pivot.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_Pivot.y - value: 0 - objectReference: {fileID: 0} - m_RemovedComponents: [] - m_ParentPrefab: {fileID: 100100000, guid: 3ce107b4a79bc4eef83afde434932a68, type: 2} - m_IsPrefabParent: 0 ---- !u!1 &137657348 stripped -GameObject: - m_PrefabParentObject: {fileID: 1990879622835084, guid: 0ccbba9c7befb48aabfcfb854dbdd852, - type: 2} - m_PrefabInternal: {fileID: 1733586127} ---- !u!1 &255077123 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 255077126} - - component: {fileID: 255077125} - - component: {fileID: 255077124} - m_Layer: 0 - m_Name: EventSystem - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!114 &255077124 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 255077123} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 1077351063, guid: f70555f144d8491a825f0804e09c671c, type: 3} - m_Name: - m_EditorClassIdentifier: - m_HorizontalAxis: Horizontal - m_VerticalAxis: Vertical - 
m_SubmitButton: Submit - m_CancelButton: Cancel - m_InputActionsPerSecond: 10 - m_RepeatDelay: 0.5 - m_ForceModuleActive: 0 ---- !u!114 &255077125 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 255077123} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: -619905303, guid: f70555f144d8491a825f0804e09c671c, type: 3} - m_Name: - m_EditorClassIdentifier: - m_FirstSelected: {fileID: 0} - m_sendNavigationEvents: 1 - m_DragThreshold: 5 ---- !u!4 &255077126 -Transform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 255077123} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 0} - m_RootOrder: 2 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &318490716 -GameObject: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 318490717} - m_Layer: 0 - m_Name: UnityEngine-Recorder - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &318490717 -Transform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 318490716} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 365376271} - - {fileID: 1265651286} - m_Father: {fileID: 0} - m_RootOrder: 3 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &365376270 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 365376271} - m_Layer: 0 - m_Name: Settings - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - 
m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &365376271 -Transform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 365376270} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 1257687049} - m_Father: {fileID: 318490717} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1001 &568332033 -Prefab: - m_ObjectHideFlags: 0 - serializedVersion: 2 - m_Modification: - m_TransformParent: {fileID: 0} - m_Modifications: - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalPosition.x - value: 106.38621 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalPosition.y - value: 38.840767 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalPosition.z - value: 34.72934 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalRotation.x - value: 0.31598538 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalRotation.y - value: -0.3596048 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalRotation.z - value: 0.13088542 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalRotation.w - value: 0.8681629 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_RootOrder - value: 4 - objectReference: {fileID: 0} - - target: {fileID: 
65880592586321730, guid: bed6005cc2a1a47edafba27cde6b5538, - type: 2} - propertyPath: m_Material - value: - objectReference: {fileID: 0} - m_RemovedComponents: [] - m_ParentPrefab: {fileID: 100100000, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - m_IsPrefabParent: 0 ---- !u!1 &1009000883 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 1009000884} - - component: {fileID: 1009000887} - m_Layer: 0 - m_Name: OverviewCam - m_TagString: MainCamera - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &1009000884 -Transform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1009000883} - m_LocalRotation: {x: 0.5, y: 0, z: 0, w: 0.8660254} - m_LocalPosition: {x: 0, y: 30, z: -20} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 0} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 60, y: 0, z: 0} ---- !u!20 &1009000887 -Camera: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1009000883} - m_Enabled: 1 - serializedVersion: 2 - m_ClearFlags: 2 - m_BackGroundColor: {r: 0.46666667, g: 0.5647059, b: 0.60784316, a: 1} - m_NormalizedViewPortRect: - serializedVersion: 2 - x: 0 - y: 0 - width: 1 - height: 1 - near clip plane: 0.3 - far clip plane: 1000 - field of view: 70 - orthographic: 0 - orthographic size: 6.98 - m_Depth: 2 - m_CullingMask: - serializedVersion: 2 - m_Bits: 4294967295 - m_RenderingPath: -1 - m_TargetTexture: {fileID: 0} - m_TargetDisplay: 0 - m_TargetEye: 3 - m_HDR: 1 - m_AllowMSAA: 1 - m_AllowDynamicResolution: 0 - m_ForceIntoRT: 0 - m_OcclusionCulling: 1 - m_StereoConvergence: 10 - m_StereoSeparation: 0.022 ---- !u!1 &1198770627 stripped -GameObject: - m_PrefabParentObject: {fileID: 1085869478616300, guid: 0ccbba9c7befb48aabfcfb854dbdd852, - type: 2} 
- m_PrefabInternal: {fileID: 1733586127} ---- !u!1 &1257687048 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 1257687049} - m_Layer: 0 - m_Name: 50bfc0f4c3d6f46df98d3c66ceb89209 - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &1257687049 -Transform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1257687048} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 365376271} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &1265651285 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 1265651286} - m_Layer: 0 - m_Name: RecordingSessions - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &1265651286 -Transform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1265651285} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 318490717} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &1302349068 stripped -GameObject: - m_PrefabParentObject: {fileID: 1486138403211918, guid: 0ccbba9c7befb48aabfcfb854dbdd852, - type: 2} - m_PrefabInternal: {fileID: 1733586127} ---- !u!1001 &1306624900 -Prefab: - m_ObjectHideFlags: 0 - serializedVersion: 2 - m_Modification: - m_TransformParent: {fileID: 0} - m_Modifications: - - target: {fileID: 4086282048714158, guid: 576f6670c9af14b90bc8f418d0e6c94d, type: 2} - propertyPath: m_LocalPosition.x - 
value: -15 - objectReference: {fileID: 0} - - target: {fileID: 4086282048714158, guid: 576f6670c9af14b90bc8f418d0e6c94d, type: 2} - propertyPath: m_LocalPosition.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4086282048714158, guid: 576f6670c9af14b90bc8f418d0e6c94d, type: 2} - propertyPath: m_LocalPosition.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4086282048714158, guid: 576f6670c9af14b90bc8f418d0e6c94d, type: 2} - propertyPath: m_LocalRotation.x - value: -0 - objectReference: {fileID: 0} - - target: {fileID: 4086282048714158, guid: 576f6670c9af14b90bc8f418d0e6c94d, type: 2} - propertyPath: m_LocalRotation.y - value: -0 - objectReference: {fileID: 0} - - target: {fileID: 4086282048714158, guid: 576f6670c9af14b90bc8f418d0e6c94d, type: 2} - propertyPath: m_LocalRotation.z - value: -0 - objectReference: {fileID: 0} - - target: {fileID: 4086282048714158, guid: 576f6670c9af14b90bc8f418d0e6c94d, type: 2} - propertyPath: m_LocalRotation.w - value: 1 - objectReference: {fileID: 0} - - target: {fileID: 4086282048714158, guid: 576f6670c9af14b90bc8f418d0e6c94d, type: 2} - propertyPath: m_RootOrder - value: 6 - objectReference: {fileID: 0} - - target: {fileID: 65043724339313280, guid: 576f6670c9af14b90bc8f418d0e6c94d, - type: 2} - propertyPath: m_Size.y - value: 300 - objectReference: {fileID: 0} - m_RemovedComponents: [] - m_ParentPrefab: {fileID: 100100000, guid: 576f6670c9af14b90bc8f418d0e6c94d, type: 2} - m_IsPrefabParent: 0 ---- !u!1 &1574236047 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 1574236049} - - component: {fileID: 1574236048} - m_Layer: 0 - m_Name: Academy - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!114 &1574236048 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - 
m_GameObject: {fileID: 1574236047} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: a2ca406dad5ec4ede8184998f4f9067d, type: 3} - m_Name: - m_EditorClassIdentifier: - broadcastHub: - broadcastingBrains: - - {fileID: 11400000, guid: e8b2d719f6a324b1abb68d8cf2859f5c, type: 2} - - {fileID: 11400000, guid: dd07b1953eac4411b81fba032f394726, type: 2} - m_BrainsToControl: - - {fileID: 11400000, guid: e8b2d719f6a324b1abb68d8cf2859f5c, type: 2} - m_MaxSteps: 0 - m_TrainingConfiguration: - width: 1280 - height: 720 - qualityLevel: 0 - timeScale: 15 - targetFrameRate: 60 - m_InferenceConfiguration: - width: 1280 - height: 720 - qualityLevel: 5 - timeScale: 1 - targetFrameRate: 60 - resetParameters: - m_ResetParameters: - - key: dynamic_friction - value: 0 - - key: static_friction - value: 0 - - key: block_drag - value: 0.5 - - key: block_scale - value: 2 - agentRunSpeed: 2 - agentRotationSpeed: 15 - spawnAreaMarginMultiplier: 0.5 - goalScoredMaterial: {fileID: 2100000, guid: df32cc593804f42df97464dc455057b8, type: 2} - failMaterial: {fileID: 2100000, guid: a1daf31cdf41e484ca9ac33a5c6f524a, type: 2} - gravityMultiplier: 2.5 ---- !u!4 &1574236049 -Transform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1574236047} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 0} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1001 &1733586127 -Prefab: - m_ObjectHideFlags: 0 - serializedVersion: 2 - m_Modification: - m_TransformParent: {fileID: 0} - m_Modifications: - - target: {fileID: 4533671617941172, guid: 0ccbba9c7befb48aabfcfb854dbdd852, type: 2} - propertyPath: m_LocalPosition.x - value: 15 - objectReference: {fileID: 0} - - target: {fileID: 4533671617941172, guid: 0ccbba9c7befb48aabfcfb854dbdd852, type: 2} - propertyPath: m_LocalPosition.y - value: 0 - 
objectReference: {fileID: 0} - - target: {fileID: 4533671617941172, guid: 0ccbba9c7befb48aabfcfb854dbdd852, type: 2} - propertyPath: m_LocalPosition.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4533671617941172, guid: 0ccbba9c7befb48aabfcfb854dbdd852, type: 2} - propertyPath: m_LocalRotation.x - value: -0 - objectReference: {fileID: 0} - - target: {fileID: 4533671617941172, guid: 0ccbba9c7befb48aabfcfb854dbdd852, type: 2} - propertyPath: m_LocalRotation.y - value: -0 - objectReference: {fileID: 0} - - target: {fileID: 4533671617941172, guid: 0ccbba9c7befb48aabfcfb854dbdd852, type: 2} - propertyPath: m_LocalRotation.z - value: -0 - objectReference: {fileID: 0} - - target: {fileID: 4533671617941172, guid: 0ccbba9c7befb48aabfcfb854dbdd852, type: 2} - propertyPath: m_LocalRotation.w - value: 1 - objectReference: {fileID: 0} - - target: {fileID: 4533671617941172, guid: 0ccbba9c7befb48aabfcfb854dbdd852, type: 2} - propertyPath: m_RootOrder - value: 7 - objectReference: {fileID: 0} - - target: {fileID: 114529729461792126, guid: 0ccbba9c7befb48aabfcfb854dbdd852, - type: 2} - propertyPath: ground - value: - objectReference: {fileID: 1198770627} - - target: {fileID: 114529729461792126, guid: 0ccbba9c7befb48aabfcfb854dbdd852, - type: 2} - propertyPath: area - value: - objectReference: {fileID: 1302349068} - - target: {fileID: 114529729461792126, guid: 0ccbba9c7befb48aabfcfb854dbdd852, - type: 2} - propertyPath: goal - value: - objectReference: {fileID: 137657348} - - target: {fileID: 114529729461792126, guid: 0ccbba9c7befb48aabfcfb854dbdd852, - type: 2} - propertyPath: block - value: - objectReference: {fileID: 1956016757} - - target: {fileID: 65995918856407652, guid: 0ccbba9c7befb48aabfcfb854dbdd852, - type: 2} - propertyPath: m_Size.y - value: 300 - objectReference: {fileID: 0} - m_RemovedComponents: [] - m_ParentPrefab: {fileID: 100100000, guid: 0ccbba9c7befb48aabfcfb854dbdd852, type: 2} - m_IsPrefabParent: 0 ---- !u!1 &1956016757 stripped -GameObject: 
- m_PrefabParentObject: {fileID: 1999560460119064, guid: 0ccbba9c7befb48aabfcfb854dbdd852, - type: 2} - m_PrefabInternal: {fileID: 1733586127} diff --git a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Scenes/PushBlockIL.unity.meta b/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Scenes/PushBlockIL.unity.meta deleted file mode 100644 index 2b81c311a7..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Scenes/PushBlockIL.unity.meta +++ /dev/null @@ -1,7 +0,0 @@ -fileFormatVersion: 2 -guid: 1bd7313e03fda420b90b4fcf2fc380f0 -DefaultImporter: - externalObjects: {} - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Scenes/VisualPushBlock.unity b/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Scenes/VisualPushBlock.unity index 1c708dac1e..8e6b656fc1 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Scenes/VisualPushBlock.unity +++ b/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Scenes/VisualPushBlock.unity @@ -167,9 +167,19 @@ Prefab: value: objectReference: {fileID: 11400000, guid: d359d2290a825421e930c94284994e3f, type: 2} + - target: {fileID: 114024228081418500, guid: 9d9b85a2a80e74e5294bdfb248825335, + type: 2} + propertyPath: camera + value: + objectReference: {fileID: 116640260} m_RemovedComponents: [] m_ParentPrefab: {fileID: 100100000, guid: 9d9b85a2a80e74e5294bdfb248825335, type: 2} m_IsPrefabParent: 0 +--- !u!20 &116640260 stripped +Camera: + m_PrefabParentObject: {fileID: 20961401228419460, guid: 9d9b85a2a80e74e5294bdfb248825335, + type: 2} + m_PrefabInternal: {fileID: 116640259} --- !u!1 &255077123 GameObject: m_ObjectHideFlags: 0 @@ -344,9 +354,19 @@ Prefab: value: objectReference: {fileID: 11400000, guid: d359d2290a825421e930c94284994e3f, type: 2} + - target: {fileID: 114024228081418500, guid: 9d9b85a2a80e74e5294bdfb248825335, + type: 2} + propertyPath: camera + value: + objectReference: {fileID: 731659952} m_RemovedComponents: [] m_ParentPrefab: {fileID: 100100000, guid: 
9d9b85a2a80e74e5294bdfb248825335, type: 2} m_IsPrefabParent: 0 +--- !u!20 &731659952 stripped +Camera: + m_PrefabParentObject: {fileID: 20961401228419460, guid: 9d9b85a2a80e74e5294bdfb248825335, + type: 2} + m_PrefabInternal: {fileID: 731659951} --- !u!1 &762086410 GameObject: m_ObjectHideFlags: 0 @@ -465,9 +485,19 @@ Prefab: value: objectReference: {fileID: 11400000, guid: d359d2290a825421e930c94284994e3f, type: 2} + - target: {fileID: 114024228081418500, guid: 9d9b85a2a80e74e5294bdfb248825335, + type: 2} + propertyPath: camera + value: + objectReference: {fileID: 912811241} m_RemovedComponents: [] m_ParentPrefab: {fileID: 100100000, guid: 9d9b85a2a80e74e5294bdfb248825335, type: 2} m_IsPrefabParent: 0 +--- !u!20 &912811241 stripped +Camera: + m_PrefabParentObject: {fileID: 20961401228419460, guid: 9d9b85a2a80e74e5294bdfb248825335, + type: 2} + m_PrefabInternal: {fileID: 912811240} --- !u!1 &1009000883 GameObject: m_ObjectHideFlags: 0 @@ -616,10 +646,6 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: a2ca406dad5ec4ede8184998f4f9067d, type: 3} m_Name: m_EditorClassIdentifier: - broadcastHub: - broadcastingBrains: - - {fileID: 11400000, guid: d359d2290a825421e930c94284994e3f, type: 2} - m_BrainsToControl: [] m_TrainingConfiguration: width: 1280 height: 720 @@ -714,9 +740,19 @@ Prefab: value: objectReference: {fileID: 11400000, guid: d359d2290a825421e930c94284994e3f, type: 2} + - target: {fileID: 114024228081418500, guid: 9d9b85a2a80e74e5294bdfb248825335, + type: 2} + propertyPath: camera + value: + objectReference: {fileID: 1878756100} m_RemovedComponents: [] m_ParentPrefab: {fileID: 100100000, guid: 9d9b85a2a80e74e5294bdfb248825335, type: 2} m_IsPrefabParent: 0 +--- !u!20 &1878756100 stripped +Camera: + m_PrefabParentObject: {fileID: 20961401228419460, guid: 9d9b85a2a80e74e5294bdfb248825335, + type: 2} + m_PrefabInternal: {fileID: 1878756099} --- !u!1001 &1942601654 Prefab: m_ObjectHideFlags: 0 @@ -770,9 +806,19 @@ Prefab: value: objectReference: {fileID: 
11400000, guid: d359d2290a825421e930c94284994e3f, type: 2} + - target: {fileID: 114024228081418500, guid: 9d9b85a2a80e74e5294bdfb248825335, + type: 2} + propertyPath: camera + value: + objectReference: {fileID: 1942601655} m_RemovedComponents: [] m_ParentPrefab: {fileID: 100100000, guid: 9d9b85a2a80e74e5294bdfb248825335, type: 2} m_IsPrefabParent: 0 +--- !u!20 &1942601655 stripped +Camera: + m_PrefabParentObject: {fileID: 20961401228419460, guid: 9d9b85a2a80e74e5294bdfb248825335, + type: 2} + m_PrefabInternal: {fileID: 1942601654} --- !u!1001 &1954420364 Prefab: m_ObjectHideFlags: 0 @@ -812,12 +858,6 @@ Prefab: propertyPath: m_RootOrder value: 6 objectReference: {fileID: 0} - - target: {fileID: 114812843792483960, guid: 9d9b85a2a80e74e5294bdfb248825335, - type: 2} - propertyPath: brain - value: - objectReference: {fileID: 11400000, guid: d359d2290a825421e930c94284994e3f, - type: 2} m_RemovedComponents: [] m_ParentPrefab: {fileID: 100100000, guid: 9d9b85a2a80e74e5294bdfb248825335, type: 2} m_IsPrefabParent: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Scripts/PushAgentBasic.cs b/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Scripts/PushAgentBasic.cs index 51f22f9e45..87d19f8ae1 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Scripts/PushAgentBasic.cs +++ b/UnitySDK/Assets/ML-Agents/Examples/PushBlock/Scripts/PushAgentBasic.cs @@ -186,6 +186,27 @@ public override void AgentAction(float[] vectorAction, string textAction) AddReward(-1f / agentParameters.maxStep); } + public override float[] Heuristic() + { + if (Input.GetKey(KeyCode.D)) + { + return new float[] { 3 }; + } + if (Input.GetKey(KeyCode.W)) + { + return new float[] { 1 }; + } + if (Input.GetKey(KeyCode.A)) + { + return new float[] { 4 }; + } + if (Input.GetKey(KeyCode.S)) + { + return new float[] { 2 }; + } + return new float[] { 0 }; + } + /// /// Resets the block position and velocities. 
/// diff --git a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/TFModels/PushBlock.nn b/UnitySDK/Assets/ML-Agents/Examples/PushBlock/TFModels/PushBlock.nn new file mode 100644 index 0000000000..1598e66520 Binary files /dev/null and b/UnitySDK/Assets/ML-Agents/Examples/PushBlock/TFModels/PushBlock.nn differ diff --git a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/TFModels/PushBlock.nn.meta b/UnitySDK/Assets/ML-Agents/Examples/PushBlock/TFModels/PushBlock.nn.meta new file mode 100644 index 0000000000..94a49f41b3 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Examples/PushBlock/TFModels/PushBlock.nn.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: 70db47ab276e44fe0beb677ff8d69382 +ScriptedImporter: + userData: + assetBundleName: + assetBundleVariant: + script: {fileID: 11500000, guid: 19ed1486aa27d4903b34839f37b8f69f, type: 3} diff --git a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/TFModels/PushBlockLearning.nn b/UnitySDK/Assets/ML-Agents/Examples/PushBlock/TFModels/PushBlockLearning.nn deleted file mode 100644 index 834bf8b0ea..0000000000 Binary files a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/TFModels/PushBlockLearning.nn and /dev/null differ diff --git a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/TFModels/PushBlockLearning.nn.meta b/UnitySDK/Assets/ML-Agents/Examples/PushBlock/TFModels/PushBlockLearning.nn.meta deleted file mode 100644 index eca5ffc6aa..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/PushBlock/TFModels/PushBlockLearning.nn.meta +++ /dev/null @@ -1,7 +0,0 @@ -fileFormatVersion: 2 -guid: c60a63ad5dc0c4a029d7360054667457 -ScriptedImporter: - userData: - assetBundleName: - assetBundleVariant: - script: {fileID: 11500000, guid: 19ed1486aa27d4903b34839f37b8f69f, type: 3} diff --git a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Brains.meta b/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Brains.meta deleted file mode 100644 index ecbbd38b71..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Brains.meta +++ /dev/null @@ -1,8 +0,0 @@ 
-fileFormatVersion: 2 -guid: c65c11f5ba7354ac78679881cfde79de -folderAsset: yes -DefaultImporter: - externalObjects: {} - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Brains/PyramidsLearning.asset b/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Brains/PyramidsLearning.asset deleted file mode 100644 index 80ba4fd40b..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Brains/PyramidsLearning.asset +++ /dev/null @@ -1,23 +0,0 @@ -%YAML 1.1 -%TAG !u! tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 8b23992c8eb17439887f5e944bf04a40, type: 3} - m_Name: PyramidsLearning - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 172 - numStackedVectorObservations: 1 - vectorActionSize: 05000000 - cameraResolutions: [] - vectorActionDescriptions: - - - vectorActionSpaceType: 0 - model: {fileID: 11400000, guid: 9bafa731bfcbc4f0faa73c365e7af924, type: 3} - inferenceDevice: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Brains/PyramidsLearning.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Brains/PyramidsLearning.asset.meta deleted file mode 100644 index 10468e38e2..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Brains/PyramidsLearning.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 7b7715ed1d436417db67026a47f17576 -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Brains/PyramidsPlayer.asset b/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Brains/PyramidsPlayer.asset deleted file mode 100644 index 90bc0f8ef7..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Brains/PyramidsPlayer.asset +++ 
/dev/null @@ -1,36 +0,0 @@ -%YAML 1.1 -%TAG !u! tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 41e9bda8f3cf1492fa74926a530f6f70, type: 3} - m_Name: PyramidsPlayer - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 172 - numStackedVectorObservations: 1 - vectorActionSize: 05000000 - cameraResolutions: [] - vectorActionDescriptions: - - - vectorActionSpaceType: 0 - keyContinuousPlayerActions: [] - axisContinuousPlayerActions: [] - discretePlayerActions: - - key: 119 - branchIndex: 0 - value: 1 - - key: 115 - branchIndex: 0 - value: 2 - - key: 100 - branchIndex: 0 - value: 3 - - key: 97 - branchIndex: 0 - value: 4 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Brains/PyramidsPlayer.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Brains/PyramidsPlayer.asset.meta deleted file mode 100644 index 04c91fbfcf..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Brains/PyramidsPlayer.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: d60466fdbfb194c56bdaf78887f2afc8 -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Brains/VisualPyramidsLearning.asset b/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Brains/VisualPyramidsLearning.asset deleted file mode 100644 index 44d8bc8d8c..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Brains/VisualPyramidsLearning.asset +++ /dev/null @@ -1,33 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 8b23992c8eb17439887f5e944bf04a40, type: 3} - m_Name: VisualPyramidsLearning - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 0 - numStackedVectorObservations: 1 - vectorActionSize: 05000000 - cameraResolutions: - - width: 84 - height: 84 - blackAndWhite: 0 - vectorActionDescriptions: - - - vectorActionSpaceType: 0 - graphModel: {fileID: 0} - graphPlaceholders: [] - BatchSizePlaceholderName: batch_size - VectorObservationPlacholderName: vector_observation - RecurrentInPlaceholderName: recurrent_in - RecurrentOutPlaceholderName: recurrent_out - VisualObservationPlaceholderName: [] - ActionPlaceholderName: action - PreviousActionPlaceholderName: prev_action diff --git a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Brains/VisualPyramidsLearning.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Brains/VisualPyramidsLearning.asset.meta deleted file mode 100644 index 1ec658fc07..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Brains/VisualPyramidsLearning.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 60f0ffcd08c3b43a6bdc746cfc0c4059 -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Brains/VisualPyramidsPlayer.asset b/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Brains/VisualPyramidsPlayer.asset deleted file mode 100644 index 2211da1e21..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Brains/VisualPyramidsPlayer.asset +++ /dev/null @@ -1,39 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 41e9bda8f3cf1492fa74926a530f6f70, type: 3} - m_Name: VisualPyramidsPlayer - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 0 - numStackedVectorObservations: 1 - vectorActionSize: 05000000 - cameraResolutions: - - width: 84 - height: 84 - blackAndWhite: 0 - vectorActionDescriptions: - - - vectorActionSpaceType: 0 - keyContinuousPlayerActions: [] - axisContinuousPlayerActions: [] - discretePlayerActions: - - key: 119 - branchIndex: 0 - value: 1 - - key: 115 - branchIndex: 0 - value: 2 - - key: 97 - branchIndex: 0 - value: 4 - - key: 100 - branchIndex: 0 - value: 3 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Brains/VisualPyramidsPlayer.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Brains/VisualPyramidsPlayer.asset.meta deleted file mode 100644 index 92d2a8517b..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Brains/VisualPyramidsPlayer.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 59a04e208fb8a423586adf25bf1fecd0 -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Prefabs/AreaPB.prefab b/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Prefabs/AreaPB.prefab index 4bc0c9e382..7ced3370d4 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Prefabs/AreaPB.prefab +++ b/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Prefabs/AreaPB.prefab @@ -128,6 +128,7 @@ GameObject: m_Component: - component: {fileID: 4518417139497368} - component: {fileID: 54596704247224538} + - component: {fileID: 114399412043818042} - component: {fileID: 114937736047215868} - component: {fileID: 114507422577425370} - component: {fileID: 
65345930959735878} @@ -2872,6 +2873,27 @@ MonoBehaviour: - {fileID: 1589816231338102} numPyra: 1 range: 45 +--- !u!114 &114399412043818042 +MonoBehaviour: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1131043459059966} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} + m_Name: + m_EditorClassIdentifier: + m_BrainParameters: + vectorObservationSize: 172 + numStackedVectorObservations: 1 + vectorActionSize: 05000000 + vectorActionDescriptions: [] + vectorActionSpaceType: 0 + m_Model: {fileID: 11400000, guid: aa3fa19a09ec44a41be3da037783ad41, type: 3} + m_InferenceDevice: 0 + m_UseHeuristic: 0 + m_BehaviorName: Pyramids --- !u!114 &114507422577425370 MonoBehaviour: m_ObjectHideFlags: 1 @@ -2908,10 +2930,7 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: b8db44472779248d3be46895c4d562d5, type: 3} m_Name: m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: 7b7715ed1d436417db67026a47f17576, type: 2} agentParameters: - agentCameras: [] - agentRenderTextures: [] maxStep: 5000 resetOnDone: 1 onDemandDecision: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Prefabs/VisualAreaPyramids.prefab b/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Prefabs/VisualAreaPyramids.prefab index 393d9cfd9e..8263a3fa61 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Prefabs/VisualAreaPyramids.prefab +++ b/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Prefabs/VisualAreaPyramids.prefab @@ -555,9 +555,10 @@ GameObject: m_Component: - component: {fileID: 4464253672231148} - component: {fileID: 54125904932801864} + - component: {fileID: 114722927650955174} - component: {fileID: 114741503533626942} - - component: {fileID: 114027965503222182} - component: {fileID: 65882754134362954} + - component: {fileID: 114674665608406760} m_Layer: 0 m_Name: Agent m_TagString: agent @@ -3134,17 +3135,6 @@ BoxCollider: serializedVersion: 2 
m_Size: {x: 1450.4971, y: 985.00024, z: 100} m_Center: {x: -575.24854, y: 292.50012, z: -3144.2273} ---- !u!114 &114027965503222182 -MonoBehaviour: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1736680821577442} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: bb172294dbbcc408286b156a2c4b553c, type: 3} - m_Name: - m_EditorClassIdentifier: --- !u!114 &114404304054259594 MonoBehaviour: m_ObjectHideFlags: 1 @@ -3185,6 +3175,43 @@ MonoBehaviour: - {fileID: 1625610554007742} numPyra: 1 range: 45 +--- !u!114 &114674665608406760 +MonoBehaviour: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1736680821577442} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 282f342c2ab144bf38be65d4d0c4e07d, type: 3} + m_Name: + m_EditorClassIdentifier: + camera: {fileID: 20712684238256298} + sensorName: CameraSensor + width: 84 + height: 84 + grayscale: 0 +--- !u!114 &114722927650955174 +MonoBehaviour: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1736680821577442} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} + m_Name: + m_EditorClassIdentifier: + m_BrainParameters: + vectorObservationSize: 0 + numStackedVectorObservations: 1 + vectorActionSize: 05000000 + vectorActionDescriptions: [] + vectorActionSpaceType: 0 + m_Model: {fileID: 0} + m_InferenceDevice: 0 + m_UseHeuristic: 0 + m_BehaviorName: VisualPyramids --- !u!114 &114741503533626942 MonoBehaviour: m_ObjectHideFlags: 1 @@ -3196,11 +3223,7 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: b8db44472779248d3be46895c4d562d5, type: 3} m_Name: m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: 60f0ffcd08c3b43a6bdc746cfc0c4059, type: 2} agentParameters: - agentCameras: - - 
{fileID: 20712684238256298} - agentRenderTextures: [] maxStep: 5000 resetOnDone: 1 onDemandDecision: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Scenes/Pyramids.unity b/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Scenes/Pyramids.unity index d7c87ccfb8..ca1175d153 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Scenes/Pyramids.unity +++ b/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Scenes/Pyramids.unity @@ -1031,10 +1031,8 @@ MonoBehaviour: m_Name: m_EditorClassIdentifier: broadcastHub: - broadcastingBrains: + brainsToControl: - {fileID: 11400000, guid: 7b7715ed1d436417db67026a47f17576, type: 2} - m_BrainsToControl: [] - m_MaxSteps: 0 m_TrainingConfiguration: width: 80 height: 80 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Scenes/PyramidsIL.unity b/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Scenes/PyramidsIL.unity deleted file mode 100644 index 3946843d16..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Scenes/PyramidsIL.unity +++ /dev/null @@ -1,566 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!29 &1 -OcclusionCullingSettings: - m_ObjectHideFlags: 0 - serializedVersion: 2 - m_OcclusionBakeSettings: - smallestOccluder: 5 - smallestHole: 0.25 - backfaceThreshold: 100 - m_SceneGUID: 00000000000000000000000000000000 - m_OcclusionCullingData: {fileID: 0} ---- !u!104 &2 -RenderSettings: - m_ObjectHideFlags: 0 - serializedVersion: 8 - m_Fog: 0 - m_FogColor: {r: 0.5, g: 0.5, b: 0.5, a: 1} - m_FogMode: 3 - m_FogDensity: 0.01 - m_LinearFogStart: 0 - m_LinearFogEnd: 300 - m_AmbientSkyColor: {r: 0.8, g: 0.8, b: 0.8, a: 1} - m_AmbientEquatorColor: {r: 0.6965513, g: 0, b: 1, a: 1} - m_AmbientGroundColor: {r: 1, g: 0.45977026, b: 0, a: 1} - m_AmbientIntensity: 1 - m_AmbientMode: 3 - m_SubtractiveShadowColor: {r: 0.42, g: 0.478, b: 0.627, a: 1} - m_SkyboxMaterial: {fileID: 0} - m_HaloStrength: 0.5 - m_FlareStrength: 1 - m_FlareFadeSpeed: 3 - m_HaloTexture: {fileID: 0} - m_SpotCookie: {fileID: 10001, guid: 0000000000000000e000000000000000, type: 0} - m_DefaultReflectionMode: 0 - m_DefaultReflectionResolution: 128 - m_ReflectionBounces: 1 - m_ReflectionIntensity: 1 - m_CustomReflection: {fileID: 0} - m_Sun: {fileID: 0} - m_IndirectSpecularColor: {r: 0, g: 0, b: 0, a: 1} ---- !u!157 &3 -LightmapSettings: - m_ObjectHideFlags: 0 - serializedVersion: 11 - m_GIWorkflowMode: 1 - m_GISettings: - serializedVersion: 2 - m_BounceScale: 1 - m_IndirectOutputScale: 1 - m_AlbedoBoost: 1 - m_TemporalCoherenceThreshold: 1 - m_EnvironmentLightingMode: 0 - m_EnableBakedLightmaps: 1 - m_EnableRealtimeLightmaps: 1 - m_LightmapEditorSettings: - serializedVersion: 9 - m_Resolution: 2 - m_BakeResolution: 40 - m_TextureWidth: 1024 - m_TextureHeight: 1024 - m_AO: 1 - m_AOMaxDistance: 1 - m_CompAOExponent: 1 - m_CompAOExponentDirect: 0 - m_Padding: 2 - m_LightmapParameters: {fileID: 0} - m_LightmapsBakeMode: 1 - m_TextureCompression: 1 - m_FinalGather: 0 - m_FinalGatherFiltering: 1 - m_FinalGatherRayCount: 256 - m_ReflectionCompression: 2 - m_MixedBakeMode: 2 - 
m_BakeBackend: 0 - m_PVRSampling: 1 - m_PVRDirectSampleCount: 32 - m_PVRSampleCount: 500 - m_PVRBounces: 2 - m_PVRFilterTypeDirect: 0 - m_PVRFilterTypeIndirect: 0 - m_PVRFilterTypeAO: 0 - m_PVRFilteringMode: 1 - m_PVRCulling: 1 - m_PVRFilteringGaussRadiusDirect: 1 - m_PVRFilteringGaussRadiusIndirect: 5 - m_PVRFilteringGaussRadiusAO: 2 - m_PVRFilteringAtrousPositionSigmaDirect: 0.5 - m_PVRFilteringAtrousPositionSigmaIndirect: 2 - m_PVRFilteringAtrousPositionSigmaAO: 1 - m_ShowResolutionOverlay: 1 - m_LightingDataAsset: {fileID: 112000002, guid: 03723c7f910c3423aa1974f1b9ce8392, - type: 2} - m_UseShadowmask: 1 ---- !u!196 &4 -NavMeshSettings: - serializedVersion: 2 - m_ObjectHideFlags: 0 - m_BuildSettings: - serializedVersion: 2 - agentTypeID: 0 - agentRadius: 0.5 - agentHeight: 2 - agentSlope: 45 - agentClimb: 0.4 - ledgeDropHeight: 0 - maxJumpAcrossDistance: 0 - minRegionArea: 2 - manualCellSize: 0 - cellSize: 0.16666667 - manualTileSize: 0 - tileSize: 256 - accuratePlacement: 0 - debug: - m_Flags: 0 - m_NavMeshData: {fileID: 0} ---- !u!1 &499540684 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 499540687} - - component: {fileID: 499540686} - - component: {fileID: 499540685} - m_Layer: 0 - m_Name: EventSystem - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!114 &499540685 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 499540684} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 1077351063, guid: f70555f144d8491a825f0804e09c671c, type: 3} - m_Name: - m_EditorClassIdentifier: - m_HorizontalAxis: Horizontal - m_VerticalAxis: Vertical - m_SubmitButton: Submit - m_CancelButton: Cancel - m_InputActionsPerSecond: 10 - m_RepeatDelay: 0.5 - m_ForceModuleActive: 0 ---- !u!114 &499540686 
-MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 499540684} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: -619905303, guid: f70555f144d8491a825f0804e09c671c, type: 3} - m_Name: - m_EditorClassIdentifier: - m_FirstSelected: {fileID: 0} - m_sendNavigationEvents: 1 - m_DragThreshold: 5 ---- !u!4 &499540687 -Transform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 499540684} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 0} - m_RootOrder: 3 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1001 &678809666 -Prefab: - m_ObjectHideFlags: 0 - serializedVersion: 2 - m_Modification: - m_TransformParent: {fileID: 0} - m_Modifications: - - target: {fileID: 4603070702628152, guid: bd804431e808a492bb5658bcd296e58e, type: 2} - propertyPath: m_LocalPosition.x - value: -50 - objectReference: {fileID: 0} - - target: {fileID: 4603070702628152, guid: bd804431e808a492bb5658bcd296e58e, type: 2} - propertyPath: m_LocalPosition.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4603070702628152, guid: bd804431e808a492bb5658bcd296e58e, type: 2} - propertyPath: m_LocalPosition.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4603070702628152, guid: bd804431e808a492bb5658bcd296e58e, type: 2} - propertyPath: m_LocalRotation.x - value: -0 - objectReference: {fileID: 0} - - target: {fileID: 4603070702628152, guid: bd804431e808a492bb5658bcd296e58e, type: 2} - propertyPath: m_LocalRotation.y - value: -0 - objectReference: {fileID: 0} - - target: {fileID: 4603070702628152, guid: bd804431e808a492bb5658bcd296e58e, type: 2} - propertyPath: m_LocalRotation.z - value: -0 - objectReference: {fileID: 0} - - target: {fileID: 4603070702628152, guid: bd804431e808a492bb5658bcd296e58e, type: 2} - 
propertyPath: m_LocalRotation.w - value: 1 - objectReference: {fileID: 0} - - target: {fileID: 4603070702628152, guid: bd804431e808a492bb5658bcd296e58e, type: 2} - propertyPath: m_RootOrder - value: 4 - objectReference: {fileID: 0} - - target: {fileID: 1464170487903594, guid: bd804431e808a492bb5658bcd296e58e, type: 2} - propertyPath: m_Name - value: TeacherAreaPB - objectReference: {fileID: 0} - - target: {fileID: 114937736047215868, guid: bd804431e808a492bb5658bcd296e58e, - type: 2} - propertyPath: brain - value: - objectReference: {fileID: 11400000, guid: d60466fdbfb194c56bdaf78887f2afc8, - type: 2} - m_RemovedComponents: [] - m_ParentPrefab: {fileID: 100100000, guid: bd804431e808a492bb5658bcd296e58e, type: 2} - m_IsPrefabParent: 0 ---- !u!1 &678809667 stripped -GameObject: - m_PrefabParentObject: {fileID: 1131043459059966, guid: bd804431e808a492bb5658bcd296e58e, - type: 2} - m_PrefabInternal: {fileID: 678809666} ---- !u!114 &678809668 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 678809667} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: d1cf16abc39fb4d6ca81222fc73d1bb5, type: 3} - m_Name: - m_EditorClassIdentifier: ---- !u!1 &1009000883 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 1009000884} - - component: {fileID: 1009000887} - m_Layer: 0 - m_Name: OverviewCamera - m_TagString: MainCamera - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &1009000884 -Transform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1009000883} - m_LocalRotation: {x: 0.42261827, y: 0, z: 0, w: 0.9063079} - m_LocalPosition: {x: 0, y: 80, z: -70} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 0} - m_RootOrder: 0 - 
m_LocalEulerAnglesHint: {x: 50, y: 0, z: 0} ---- !u!20 &1009000887 -Camera: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1009000883} - m_Enabled: 1 - serializedVersion: 2 - m_ClearFlags: 2 - m_BackGroundColor: {r: 0.46666667, g: 0.5647059, b: 0.60784316, a: 1} - m_NormalizedViewPortRect: - serializedVersion: 2 - x: 0 - y: 0 - width: 1 - height: 1 - near clip plane: 0.3 - far clip plane: 1000 - field of view: 65 - orthographic: 0 - orthographic size: 15.18 - m_Depth: 0 - m_CullingMask: - serializedVersion: 2 - m_Bits: 4294967295 - m_RenderingPath: -1 - m_TargetTexture: {fileID: 0} - m_TargetDisplay: 0 - m_TargetEye: 3 - m_HDR: 1 - m_AllowMSAA: 1 - m_AllowDynamicResolution: 0 - m_ForceIntoRT: 1 - m_OcclusionCulling: 1 - m_StereoConvergence: 10 - m_StereoSeparation: 0.022 ---- !u!1 &1574236047 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 1574236049} - - component: {fileID: 1574236048} - m_Layer: 0 - m_Name: Academy - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!114 &1574236048 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1574236047} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: dba8df9c8b16946dc88d331a301d0ab3, type: 3} - m_Name: - m_EditorClassIdentifier: - broadcastHub: - broadcastingBrains: - - {fileID: 11400000, guid: 7b7715ed1d436417db67026a47f17576, type: 2} - - {fileID: 11400000, guid: d60466fdbfb194c56bdaf78887f2afc8, type: 2} - m_BrainsToControl: - - {fileID: 11400000, guid: 7b7715ed1d436417db67026a47f17576, type: 2} - m_MaxSteps: 0 - m_TrainingConfiguration: - width: 80 - height: 80 - qualityLevel: 1 - timeScale: 100 - targetFrameRate: -1 - m_InferenceConfiguration: - width: 1280 - height: 
720 - qualityLevel: 5 - timeScale: 1 - targetFrameRate: 60 - resetParameters: - m_ResetParameters: [] ---- !u!4 &1574236049 -Transform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1574236047} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0.71938086, y: 0.27357092, z: 4.1970553} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 0} - m_RootOrder: 2 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1001 &1852559197 -Prefab: - m_ObjectHideFlags: 0 - serializedVersion: 2 - m_Modification: - m_TransformParent: {fileID: 0} - m_Modifications: - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalPosition.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalPosition.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalPosition.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalRotation.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalRotation.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalRotation.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalRotation.w - value: 1 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_RootOrder - value: 6 - objectReference: {fileID: 0} - 
- target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_AnchoredPosition.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_AnchoredPosition.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_SizeDelta.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_SizeDelta.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_AnchorMin.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_AnchorMin.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_AnchorMax.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_AnchorMax.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_Pivot.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_Pivot.y - value: 0 - objectReference: {fileID: 0} - m_RemovedComponents: [] - m_ParentPrefab: {fileID: 100100000, guid: 3ce107b4a79bc4eef83afde434932a68, type: 2} - m_IsPrefabParent: 0 ---- !u!1001 &1864968136 -Prefab: - m_ObjectHideFlags: 0 - serializedVersion: 2 - m_Modification: - m_TransformParent: {fileID: 0} - m_Modifications: - - target: {fileID: 4603070702628152, guid: 
bd804431e808a492bb5658bcd296e58e, type: 2} - propertyPath: m_LocalPosition.x - value: 50 - objectReference: {fileID: 0} - - target: {fileID: 4603070702628152, guid: bd804431e808a492bb5658bcd296e58e, type: 2} - propertyPath: m_LocalPosition.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4603070702628152, guid: bd804431e808a492bb5658bcd296e58e, type: 2} - propertyPath: m_LocalPosition.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4603070702628152, guid: bd804431e808a492bb5658bcd296e58e, type: 2} - propertyPath: m_LocalRotation.x - value: -0 - objectReference: {fileID: 0} - - target: {fileID: 4603070702628152, guid: bd804431e808a492bb5658bcd296e58e, type: 2} - propertyPath: m_LocalRotation.y - value: -0 - objectReference: {fileID: 0} - - target: {fileID: 4603070702628152, guid: bd804431e808a492bb5658bcd296e58e, type: 2} - propertyPath: m_LocalRotation.z - value: -0 - objectReference: {fileID: 0} - - target: {fileID: 4603070702628152, guid: bd804431e808a492bb5658bcd296e58e, type: 2} - propertyPath: m_LocalRotation.w - value: 1 - objectReference: {fileID: 0} - - target: {fileID: 4603070702628152, guid: bd804431e808a492bb5658bcd296e58e, type: 2} - propertyPath: m_RootOrder - value: 5 - objectReference: {fileID: 0} - - target: {fileID: 1464170487903594, guid: bd804431e808a492bb5658bcd296e58e, type: 2} - propertyPath: m_Name - value: StudentAreaPB - objectReference: {fileID: 0} - - target: {fileID: 114937736047215868, guid: bd804431e808a492bb5658bcd296e58e, - type: 2} - propertyPath: brain - value: - objectReference: {fileID: 11400000, guid: 7b7715ed1d436417db67026a47f17576, - type: 2} - m_RemovedComponents: [] - m_ParentPrefab: {fileID: 100100000, guid: bd804431e808a492bb5658bcd296e58e, type: 2} - m_IsPrefabParent: 0 ---- !u!1001 &2051113178 -Prefab: - m_ObjectHideFlags: 0 - serializedVersion: 2 - m_Modification: - m_TransformParent: {fileID: 0} - m_Modifications: - - target: {fileID: 4943719350691982, guid: 
5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalPosition.x - value: 106.38621 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalPosition.y - value: 38.840767 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalPosition.z - value: 34.72934 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalRotation.x - value: 0.31598538 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalRotation.y - value: -0.3596048 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalRotation.z - value: 0.13088542 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalRotation.w - value: 0.8681629 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_RootOrder - value: 1 - objectReference: {fileID: 0} - m_RemovedComponents: [] - m_ParentPrefab: {fileID: 100100000, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - m_IsPrefabParent: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Scenes/PyramidsIL.unity.meta b/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Scenes/PyramidsIL.unity.meta deleted file mode 100644 index 9a350b2c38..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Scenes/PyramidsIL.unity.meta +++ /dev/null @@ -1,7 +0,0 @@ -fileFormatVersion: 2 -guid: c71eddcd5d5b740d1996c008d1a6b1ab -DefaultImporter: - externalObjects: {} - userData: - assetBundleName: - assetBundleVariant: diff --git 
a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Scenes/VisualPyramids.unity b/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Scenes/VisualPyramids.unity index aff4ba2913..5bd98ae440 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Scenes/VisualPyramids.unity +++ b/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Scenes/VisualPyramids.unity @@ -156,6 +156,11 @@ Prefab: m_RemovedComponents: [] m_ParentPrefab: {fileID: 100100000, guid: 0567215293abe487b932aec366b57c8e, type: 2} m_IsPrefabParent: 0 +--- !u!20 &177604012 stripped +Camera: + m_PrefabParentObject: {fileID: 20712684238256298, guid: 0567215293abe487b932aec366b57c8e, + type: 2} + m_PrefabInternal: {fileID: 309299717} --- !u!1001 &281839921 Prefab: m_ObjectHideFlags: 0 @@ -203,6 +208,11 @@ Prefab: propertyPath: m_IsActive value: 0 objectReference: {fileID: 0} + - target: {fileID: 114538851081060382, guid: 0567215293abe487b932aec366b57c8e, + type: 2} + propertyPath: camera + value: + objectReference: {fileID: 973199703} m_RemovedComponents: [] m_ParentPrefab: {fileID: 100100000, guid: 0567215293abe487b932aec366b57c8e, type: 2} m_IsPrefabParent: 0 @@ -253,6 +263,11 @@ Prefab: propertyPath: m_IsActive value: 0 objectReference: {fileID: 0} + - target: {fileID: 114538851081060382, guid: 0567215293abe487b932aec366b57c8e, + type: 2} + propertyPath: camera + value: + objectReference: {fileID: 177604012} m_RemovedComponents: [] m_ParentPrefab: {fileID: 100100000, guid: 0567215293abe487b932aec366b57c8e, type: 2} m_IsPrefabParent: 0 @@ -411,6 +426,11 @@ Prefab: propertyPath: m_IsActive value: 0 objectReference: {fileID: 0} + - target: {fileID: 114538851081060382, guid: 0567215293abe487b932aec366b57c8e, + type: 2} + propertyPath: camera + value: + objectReference: {fileID: 1529303581} m_RemovedComponents: [] m_ParentPrefab: {fileID: 100100000, guid: 0567215293abe487b932aec366b57c8e, type: 2} m_IsPrefabParent: 0 @@ -461,9 +481,29 @@ Prefab: propertyPath: m_IsActive value: 0 objectReference: {fileID: 0} + - target: {fileID: 
114538851081060382, guid: 0567215293abe487b932aec366b57c8e, + type: 2} + propertyPath: camera + value: + objectReference: {fileID: 816767823} m_RemovedComponents: [] m_ParentPrefab: {fileID: 100100000, guid: 0567215293abe487b932aec366b57c8e, type: 2} m_IsPrefabParent: 0 +--- !u!20 &816767823 stripped +Camera: + m_PrefabParentObject: {fileID: 20712684238256298, guid: 0567215293abe487b932aec366b57c8e, + type: 2} + m_PrefabInternal: {fileID: 816767822} +--- !u!20 &828837071 stripped +Camera: + m_PrefabParentObject: {fileID: 20712684238256298, guid: 0567215293abe487b932aec366b57c8e, + type: 2} + m_PrefabInternal: {fileID: 1728325040} +--- !u!20 &973199703 stripped +Camera: + m_PrefabParentObject: {fileID: 20712684238256298, guid: 0567215293abe487b932aec366b57c8e, + type: 2} + m_PrefabInternal: {fileID: 281839921} --- !u!1 &1009000883 GameObject: m_ObjectHideFlags: 0 @@ -529,6 +569,11 @@ Camera: m_OcclusionCulling: 1 m_StereoConvergence: 10 m_StereoSeparation: 0.022 +--- !u!20 &1074152210 stripped +Camera: + m_PrefabParentObject: {fileID: 20712684238256298, guid: 0567215293abe487b932aec366b57c8e, + type: 2} + m_PrefabInternal: {fileID: 1818326666} --- !u!1001 &1155497957 Prefab: m_ObjectHideFlags: 0 @@ -576,9 +621,24 @@ Prefab: propertyPath: m_IsActive value: 0 objectReference: {fileID: 0} + - target: {fileID: 114538851081060382, guid: 0567215293abe487b932aec366b57c8e, + type: 2} + propertyPath: camera + value: + objectReference: {fileID: 1194295937} m_RemovedComponents: [] m_ParentPrefab: {fileID: 100100000, guid: 0567215293abe487b932aec366b57c8e, type: 2} m_IsPrefabParent: 0 +--- !u!20 &1194295937 stripped +Camera: + m_PrefabParentObject: {fileID: 20712684238256298, guid: 0567215293abe487b932aec366b57c8e, + type: 2} + m_PrefabInternal: {fileID: 1155497957} +--- !u!20 &1529303581 stripped +Camera: + m_PrefabParentObject: {fileID: 20712684238256298, guid: 0567215293abe487b932aec366b57c8e, + type: 2} + m_PrefabInternal: {fileID: 714012435} --- !u!1 &1574236047 
GameObject: m_ObjectHideFlags: 0 @@ -606,10 +666,6 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: dba8df9c8b16946dc88d331a301d0ab3, type: 3} m_Name: m_EditorClassIdentifier: - broadcastHub: - broadcastingBrains: - - {fileID: 11400000, guid: 60f0ffcd08c3b43a6bdc746cfc0c4059, type: 2} - m_BrainsToControl: [] m_TrainingConfiguration: width: 80 height: 80 @@ -684,6 +740,11 @@ Prefab: propertyPath: m_IsActive value: 0 objectReference: {fileID: 0} + - target: {fileID: 114538851081060382, guid: 0567215293abe487b932aec366b57c8e, + type: 2} + propertyPath: camera + value: + objectReference: {fileID: 828837071} m_RemovedComponents: [] m_ParentPrefab: {fileID: 100100000, guid: 0567215293abe487b932aec366b57c8e, type: 2} m_IsPrefabParent: 0 @@ -734,6 +795,11 @@ Prefab: propertyPath: m_IsActive value: 0 objectReference: {fileID: 0} + - target: {fileID: 114538851081060382, guid: 0567215293abe487b932aec366b57c8e, + type: 2} + propertyPath: camera + value: + objectReference: {fileID: 1074152210} m_RemovedComponents: [] m_ParentPrefab: {fileID: 100100000, guid: 0567215293abe487b932aec366b57c8e, type: 2} m_IsPrefabParent: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Scripts/PyramidAgent.cs b/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Scripts/PyramidAgent.cs index 794cb67c82..9f0f1a3ebf 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Scripts/PyramidAgent.cs +++ b/UnitySDK/Assets/ML-Agents/Examples/Pyramids/Scripts/PyramidAgent.cs @@ -28,11 +28,11 @@ public override void CollectObservations() if (useVectorObs) { const float rayDistance = 35f; - float[] rayAngles = {20f, 90f, 160f, 45f, 135f, 70f, 110f}; - float[] rayAngles1 = {25f, 95f, 165f, 50f, 140f, 75f, 115f}; - float[] rayAngles2 = {15f, 85f, 155f, 40f, 130f, 65f, 105f}; + float[] rayAngles = { 20f, 90f, 160f, 45f, 135f, 70f, 110f }; + float[] rayAngles1 = { 25f, 95f, 165f, 50f, 140f, 75f, 115f }; + float[] rayAngles2 = { 15f, 85f, 155f, 40f, 130f, 65f, 105f }; - string[] detectableObjects = 
{"block", "wall", "goal", "switchOff", "switchOn", "stone"}; + string[] detectableObjects = { "block", "wall", "goal", "switchOff", "switchOn", "stone" }; AddVectorObs(m_RayPer.Perceive(rayDistance, rayAngles, detectableObjects, 0f, 0f)); AddVectorObs(m_RayPer.Perceive(rayDistance, rayAngles1, detectableObjects, 0f, 5f)); AddVectorObs(m_RayPer.Perceive(rayDistance, rayAngles2, detectableObjects, 0f, 10f)); @@ -46,29 +46,21 @@ public void MoveAgent(float[] act) var dirToGo = Vector3.zero; var rotateDir = Vector3.zero; - if (brain.brainParameters.vectorActionSpaceType == SpaceType.Continuous) + var action = Mathf.FloorToInt(act[0]); + switch (action) { - dirToGo = transform.forward * Mathf.Clamp(act[0], -1f, 1f); - rotateDir = transform.up * Mathf.Clamp(act[1], -1f, 1f); - } - else - { - var action = Mathf.FloorToInt(act[0]); - switch (action) - { - case 1: - dirToGo = transform.forward * 1f; - break; - case 2: - dirToGo = transform.forward * -1f; - break; - case 3: - rotateDir = transform.up * 1f; - break; - case 4: - rotateDir = transform.up * -1f; - break; - } + case 1: + dirToGo = transform.forward * 1f; + break; + case 2: + dirToGo = transform.forward * -1f; + break; + case 3: + rotateDir = transform.up * 1f; + break; + case 4: + rotateDir = transform.up * -1f; + break; } transform.Rotate(rotateDir, Time.deltaTime * 200f); m_AgentRb.AddForce(dirToGo * 2f, ForceMode.VelocityChange); @@ -80,6 +72,27 @@ public override void AgentAction(float[] vectorAction, string textAction) MoveAgent(vectorAction); } + public override float[] Heuristic() + { + if (Input.GetKey(KeyCode.D)) + { + return new float[] { 3 }; + } + if (Input.GetKey(KeyCode.W)) + { + return new float[] { 1 }; + } + if (Input.GetKey(KeyCode.A)) + { + return new float[] { 4 }; + } + if (Input.GetKey(KeyCode.S)) + { + return new float[] { 2 }; + } + return new float[] { 0 }; + } + public override void AgentReset() { var enumerable = Enumerable.Range(0, 9).OrderBy(x => Guid.NewGuid()).Take(9); diff --git 
a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/TFModels/Pyramids.nn b/UnitySDK/Assets/ML-Agents/Examples/Pyramids/TFModels/Pyramids.nn new file mode 100644 index 0000000000..e2227dc86f Binary files /dev/null and b/UnitySDK/Assets/ML-Agents/Examples/Pyramids/TFModels/Pyramids.nn differ diff --git a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/TFModels/Pyramids.nn.meta b/UnitySDK/Assets/ML-Agents/Examples/Pyramids/TFModels/Pyramids.nn.meta new file mode 100644 index 0000000000..94b9ab0d3f --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Examples/Pyramids/TFModels/Pyramids.nn.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: aa3fa19a09ec44a41be3da037783ad41 +ScriptedImporter: + userData: + assetBundleName: + assetBundleVariant: + script: {fileID: 11500000, guid: 19ed1486aa27d4903b34839f37b8f69f, type: 3} diff --git a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/TFModels/PyramidsLearning.nn b/UnitySDK/Assets/ML-Agents/Examples/Pyramids/TFModels/PyramidsLearning.nn deleted file mode 100644 index 8f90c01b67..0000000000 Binary files a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/TFModels/PyramidsLearning.nn and /dev/null differ diff --git a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/TFModels/PyramidsLearning.nn.meta b/UnitySDK/Assets/ML-Agents/Examples/Pyramids/TFModels/PyramidsLearning.nn.meta deleted file mode 100644 index a646bab902..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Pyramids/TFModels/PyramidsLearning.nn.meta +++ /dev/null @@ -1,7 +0,0 @@ -fileFormatVersion: 2 -guid: 9bafa731bfcbc4f0faa73c365e7af924 -ScriptedImporter: - userData: - assetBundleName: - assetBundleVariant: - script: {fileID: 11500000, guid: 19ed1486aa27d4903b34839f37b8f69f, type: 3} diff --git a/UnitySDK/Assets/ML-Agents/Examples/Reacher/Brains.meta b/UnitySDK/Assets/ML-Agents/Examples/Reacher/Brains.meta deleted file mode 100644 index 00fd2585c0..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Reacher/Brains.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 
36e47df1679934b36b8f5e72131f7141 -folderAsset: yes -DefaultImporter: - externalObjects: {} - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Reacher/Brains/ReacherLearning.asset b/UnitySDK/Assets/ML-Agents/Examples/Reacher/Brains/ReacherLearning.asset deleted file mode 100644 index a6e5498124..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Reacher/Brains/ReacherLearning.asset +++ /dev/null @@ -1,26 +0,0 @@ -%YAML 1.1 -%TAG !u! tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 8b23992c8eb17439887f5e944bf04a40, type: 3} - m_Name: ReacherLearning - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 33 - numStackedVectorObservations: 1 - vectorActionSize: 04000000 - cameraResolutions: [] - vectorActionDescriptions: - - - - - - - - - vectorActionSpaceType: 1 - model: {fileID: 11400000, guid: 0c779bd93060f405cbe4446e1dcbf2a6, type: 3} - inferenceDevice: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Reacher/Brains/ReacherLearning.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/Reacher/Brains/ReacherLearning.asset.meta deleted file mode 100644 index ea7e26f366..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Reacher/Brains/ReacherLearning.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: aee5a4acc5804447682bf509557afa4f -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Reacher/Prefabs/Agent.prefab b/UnitySDK/Assets/ML-Agents/Examples/Reacher/Prefabs/Agent.prefab index 4c188c69f3..ed2f291616 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/Reacher/Prefabs/Agent.prefab +++ b/UnitySDK/Assets/ML-Agents/Examples/Reacher/Prefabs/Agent.prefab @@ -75,6 +75,7 
@@ GameObject: serializedVersion: 5 m_Component: - component: {fileID: 4067321601414524} + - component: {fileID: 114731167133171590} - component: {fileID: 114955921823023820} m_Layer: 0 m_Name: Agent @@ -528,6 +529,27 @@ Rigidbody: m_Interpolate: 0 m_Constraints: 0 m_CollisionDetection: 0 +--- !u!114 &114731167133171590 +MonoBehaviour: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1395682910799436} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} + m_Name: + m_EditorClassIdentifier: + m_BrainParameters: + vectorObservationSize: 33 + numStackedVectorObservations: 1 + vectorActionSize: 04000000 + vectorActionDescriptions: [] + vectorActionSpaceType: 1 + m_Model: {fileID: 11400000, guid: d7bdb6a78154f4cf99437d67e4a569a8, type: 3} + m_InferenceDevice: 0 + m_UseHeuristic: 0 + m_BehaviorName: Reacher --- !u!114 &114928491800121992 MonoBehaviour: m_ObjectHideFlags: 1 @@ -553,10 +575,7 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 220b156e3b142406c8b76d4db981d044, type: 3} m_Name: m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: aee5a4acc5804447682bf509557afa4f, type: 2} agentParameters: - agentCameras: [] - agentRenderTextures: [] maxStep: 4000 resetOnDone: 1 onDemandDecision: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Reacher/Scenes/Reacher.unity b/UnitySDK/Assets/ML-Agents/Examples/Reacher/Scenes/Reacher.unity index 89422d8707..d281b21650 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/Reacher/Scenes/Reacher.unity +++ b/UnitySDK/Assets/ML-Agents/Examples/Reacher/Scenes/Reacher.unity @@ -1266,10 +1266,8 @@ MonoBehaviour: m_Name: m_EditorClassIdentifier: broadcastHub: - broadcastingBrains: + brainsToControl: - {fileID: 11400000, guid: aee5a4acc5804447682bf509557afa4f, type: 2} - m_BrainsToControl: [] - m_MaxSteps: 0 m_TrainingConfiguration: width: 80 height: 80 diff --git 
a/UnitySDK/Assets/ML-Agents/Examples/Reacher/Scripts/ReacherDecision.cs b/UnitySDK/Assets/ML-Agents/Examples/Reacher/Scripts/ReacherDecision.cs deleted file mode 100644 index 53c365b7cc..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Reacher/Scripts/ReacherDecision.cs +++ /dev/null @@ -1,21 +0,0 @@ -using System.Collections.Generic; -using UnityEngine; -using MLAgents; - -public class ReacherDecision : Decision -{ - public override float[] Decide(List state, List observation, float reward, bool done, List memory) - { - var action = new float[4]; - for (var i = 0; i < 4; i++) - { - action[i] = Random.Range(-1f, 1f); - } - return action; - } - - public override List MakeMemory(List state, List observation, float reward, bool done, List memory) - { - return new List(); - } -} diff --git a/UnitySDK/Assets/ML-Agents/Examples/Reacher/Scripts/ReacherDecision.cs.meta b/UnitySDK/Assets/ML-Agents/Examples/Reacher/Scripts/ReacherDecision.cs.meta deleted file mode 100644 index 96fa7f1b8b..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Reacher/Scripts/ReacherDecision.cs.meta +++ /dev/null @@ -1,12 +0,0 @@ -fileFormatVersion: 2 -guid: ab9b9df01a69049778c72ce49b13cfd9 -timeCreated: 1503355437 -licenseType: Free -MonoImporter: - serializedVersion: 2 - defaultReferences: [] - executionOrder: 0 - icon: {instanceID: 0} - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Reacher/TFModels/Reacher.nn b/UnitySDK/Assets/ML-Agents/Examples/Reacher/TFModels/Reacher.nn new file mode 100644 index 0000000000..bcda57d751 Binary files /dev/null and b/UnitySDK/Assets/ML-Agents/Examples/Reacher/TFModels/Reacher.nn differ diff --git a/UnitySDK/Assets/ML-Agents/Examples/Reacher/TFModels/Reacher.nn.meta b/UnitySDK/Assets/ML-Agents/Examples/Reacher/TFModels/Reacher.nn.meta new file mode 100644 index 0000000000..c1f1f6382f --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Examples/Reacher/TFModels/Reacher.nn.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 
2 +guid: d7bdb6a78154f4cf99437d67e4a569a8 +ScriptedImporter: + userData: + assetBundleName: + assetBundleVariant: + script: {fileID: 11500000, guid: 19ed1486aa27d4903b34839f37b8f69f, type: 3} diff --git a/UnitySDK/Assets/ML-Agents/Examples/Reacher/TFModels/ReacherLearning.nn b/UnitySDK/Assets/ML-Agents/Examples/Reacher/TFModels/ReacherLearning.nn deleted file mode 100644 index 974910e25e..0000000000 Binary files a/UnitySDK/Assets/ML-Agents/Examples/Reacher/TFModels/ReacherLearning.nn and /dev/null differ diff --git a/UnitySDK/Assets/ML-Agents/Examples/Reacher/TFModels/ReacherLearning.nn.meta b/UnitySDK/Assets/ML-Agents/Examples/Reacher/TFModels/ReacherLearning.nn.meta deleted file mode 100644 index 8e505016aa..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Reacher/TFModels/ReacherLearning.nn.meta +++ /dev/null @@ -1,7 +0,0 @@ -fileFormatVersion: 2 -guid: 0c779bd93060f405cbe4446e1dcbf2a6 -ScriptedImporter: - userData: - assetBundleName: - assetBundleVariant: - script: {fileID: 11500000, guid: 19ed1486aa27d4903b34839f37b8f69f, type: 3} diff --git a/UnitySDK/Assets/ML-Agents/Examples/SharedAssets/Scripts/RandomDecision.cs b/UnitySDK/Assets/ML-Agents/Examples/SharedAssets/Scripts/RandomDecision.cs deleted file mode 100644 index 3b26370ca5..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/SharedAssets/Scripts/RandomDecision.cs +++ /dev/null @@ -1,47 +0,0 @@ -using System.Collections.Generic; -using UnityEngine; - -namespace MLAgents -{ - public class RandomDecision : Decision - { - public override float[] Decide( - List vectorObs, - List visualObs, - float reward, - bool done, - List memory) - { - if (brainParameters.vectorActionSpaceType == SpaceType.Continuous) - { - var act = new List(); - - for (var i = 0; i < brainParameters.vectorActionSize[0]; i++) - { - act.Add(2 * Random.value - 1); - } - - return act.ToArray(); - } - else - { - var act = new float[brainParameters.vectorActionSize.Length]; - for (var i = 0; i < brainParameters.vectorActionSize.Length; 
i++) - { - act[i] = Random.Range(0, brainParameters.vectorActionSize[i]); - } - return act; - } - } - - public override List MakeMemory( - List vectorObs, - List visualObs, - float reward, - bool done, - List memory) - { - return new List(); - } - } -} diff --git a/UnitySDK/Assets/ML-Agents/Examples/SharedAssets/Scripts/RandomDecision.cs.meta b/UnitySDK/Assets/ML-Agents/Examples/SharedAssets/Scripts/RandomDecision.cs.meta deleted file mode 100644 index c47969d709..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/SharedAssets/Scripts/RandomDecision.cs.meta +++ /dev/null @@ -1,13 +0,0 @@ -fileFormatVersion: 2 -guid: 67264e06e07fb40d8939b0860ebee773 -timeCreated: 1520463350 -licenseType: Free -MonoImporter: - externalObjects: {} - serializedVersion: 2 - defaultReferences: [] - executionOrder: 0 - icon: {instanceID: 0} - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/SharedAssets/Scripts/RayPerception.cs b/UnitySDK/Assets/ML-Agents/Examples/SharedAssets/Scripts/RayPerception.cs index b3345c6c1b..a4b4fbc7bb 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/SharedAssets/Scripts/RayPerception.cs +++ b/UnitySDK/Assets/ML-Agents/Examples/SharedAssets/Scripts/RayPerception.cs @@ -1,16 +1,13 @@ -using System.Collections.Generic; +using System.Collections.Generic; using UnityEngine; public abstract class RayPerception : MonoBehaviour { protected List m_PerceptionBuffer = new List(); - public virtual List Perceive(float rayDistance, + abstract public List Perceive(float rayDistance, float[] rayAngles, string[] detectableObjects, - float startOffset, float endOffset) - { - return m_PerceptionBuffer; - } + float startOffset=0.0f, float endOffset=0.0f); /// /// Converts degrees to radians. 
diff --git a/UnitySDK/Assets/ML-Agents/Examples/SharedAssets/Scripts/RayPerception2D.cs b/UnitySDK/Assets/ML-Agents/Examples/SharedAssets/Scripts/RayPerception2D.cs index d113c12a4d..6c669d5950 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/SharedAssets/Scripts/RayPerception2D.cs +++ b/UnitySDK/Assets/ML-Agents/Examples/SharedAssets/Scripts/RayPerception2D.cs @@ -1,4 +1,4 @@ -using System.Collections.Generic; +using System.Collections.Generic; using UnityEngine; namespace MLAgents @@ -28,8 +28,11 @@ public class RayPerception2D : RayPerception /// Radius of rays /// Angles of rays (starting from (1,0) on unit circle). /// List of tags which correspond to object types agent can see - public List Perceive(float rayDistance, - float[] rayAngles, string[] detectableObjects) + /// Unused + /// Unused + public override List Perceive(float rayDistance, + float[] rayAngles, string[] detectableObjects, + float startOffset=0.0f, float endOffset=0.0f) { m_PerceptionBuffer.Clear(); // For each ray sublist stores categorical information on detected object diff --git a/UnitySDK/Assets/ML-Agents/Examples/SharedAssets/Scripts/RayPerception3D.cs b/UnitySDK/Assets/ML-Agents/Examples/SharedAssets/Scripts/RayPerception3D.cs index 33a16da8fd..7b85add817 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/SharedAssets/Scripts/RayPerception3D.cs +++ b/UnitySDK/Assets/ML-Agents/Examples/SharedAssets/Scripts/RayPerception3D.cs @@ -1,4 +1,4 @@ -using System; +using System; using System.Collections.Generic; using UnityEngine; @@ -34,7 +34,7 @@ public class RayPerception3D : RayPerception /// Ending height offset of ray from center of agent. 
public override List Perceive(float rayDistance, float[] rayAngles, string[] detectableObjects, - float startOffset, float endOffset) + float startOffset=0.0f, float endOffset=0.0f) { if (m_SubList == null || m_SubList.Length != detectableObjects.Length + 2) m_SubList = new float[detectableObjects.Length + 2]; diff --git a/UnitySDK/Assets/ML-Agents/Examples/Soccer/Brains.meta b/UnitySDK/Assets/ML-Agents/Examples/Soccer/Brains.meta deleted file mode 100644 index 1fe0b51daa..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Soccer/Brains.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 4e991f41af1514c39bf7a746a87aac43 -folderAsset: yes -DefaultImporter: - externalObjects: {} - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Soccer/Brains/GoalieLearning.asset b/UnitySDK/Assets/ML-Agents/Examples/Soccer/Brains/GoalieLearning.asset deleted file mode 100644 index 68fc87f360..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Soccer/Brains/GoalieLearning.asset +++ /dev/null @@ -1,23 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 8b23992c8eb17439887f5e944bf04a40, type: 3} - m_Name: GoalieLearning - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 112 - numStackedVectorObservations: 1 - vectorActionSize: 05000000 - cameraResolutions: [] - vectorActionDescriptions: - - - vectorActionSpaceType: 0 - model: {fileID: 0} - inferenceDevice: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Soccer/Brains/GoalieLearning.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/Soccer/Brains/GoalieLearning.asset.meta deleted file mode 100644 index db22d9d1c1..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Soccer/Brains/GoalieLearning.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 090fa5a8588f5433bb7f878e6f5ac954 -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Soccer/Brains/GoaliePlayer.asset b/UnitySDK/Assets/ML-Agents/Examples/Soccer/Brains/GoaliePlayer.asset deleted file mode 100644 index 9e99ba3b67..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Soccer/Brains/GoaliePlayer.asset +++ /dev/null @@ -1,36 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 41e9bda8f3cf1492fa74926a530f6f70, type: 3} - m_Name: GoaliePlayer - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 112 - numStackedVectorObservations: 1 - vectorActionSize: 05000000 - cameraResolutions: [] - vectorActionDescriptions: - - - vectorActionSpaceType: 0 - keyContinuousPlayerActions: [] - axisContinuousPlayerActions: [] - discretePlayerActions: - - key: 119 - branchIndex: 0 - value: 1 - - key: 115 - branchIndex: 0 - value: 2 - - key: 97 - branchIndex: 0 - value: 4 - - key: 100 - branchIndex: 0 - value: 3 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Soccer/Brains/GoaliePlayer.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/Soccer/Brains/GoaliePlayer.asset.meta deleted file mode 100644 index c46eaa6b11..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Soccer/Brains/GoaliePlayer.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: b7b6884feff2f4a17a645d7e0b9dc8f3 -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Soccer/Brains/StrikerLearning.asset b/UnitySDK/Assets/ML-Agents/Examples/Soccer/Brains/StrikerLearning.asset deleted file mode 100644 index b409665e19..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Soccer/Brains/StrikerLearning.asset +++ /dev/null @@ -1,23 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 8b23992c8eb17439887f5e944bf04a40, type: 3} - m_Name: StrikerLearning - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 112 - numStackedVectorObservations: 1 - vectorActionSize: 07000000 - cameraResolutions: [] - vectorActionDescriptions: - - - vectorActionSpaceType: 0 - model: {fileID: 0} - inferenceDevice: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Soccer/Brains/StrikerLearning.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/Soccer/Brains/StrikerLearning.asset.meta deleted file mode 100644 index 84c4e92d9b..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Soccer/Brains/StrikerLearning.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 29ed78b3e8fef4340b3a1f6954b88f18 -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Soccer/Brains/StrikerPlayer.asset b/UnitySDK/Assets/ML-Agents/Examples/Soccer/Brains/StrikerPlayer.asset deleted file mode 100644 index ae2e474dc9..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Soccer/Brains/StrikerPlayer.asset +++ /dev/null @@ -1,42 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 41e9bda8f3cf1492fa74926a530f6f70, type: 3} - m_Name: StrikerPlayer - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 112 - numStackedVectorObservations: 1 - vectorActionSize: 07000000 - cameraResolutions: [] - vectorActionDescriptions: - - - vectorActionSpaceType: 0 - keyContinuousPlayerActions: [] - axisContinuousPlayerActions: [] - discretePlayerActions: - - key: 119 - branchIndex: 0 - value: 1 - - key: 115 - branchIndex: 0 - value: 2 - - key: 108 - branchIndex: 0 - value: 3 - - key: 107 - branchIndex: 0 - value: 4 - - key: 97 - branchIndex: 0 - value: 5 - - key: 100 - branchIndex: 0 - value: 6 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Soccer/Brains/StrikerPlayer.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/Soccer/Brains/StrikerPlayer.asset.meta deleted file mode 100644 index 1bfc6e0759..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Soccer/Brains/StrikerPlayer.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 0e2b949bf7d37469786426a6d913f5af -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Soccer/Prefabs/SoccerFieldTwos.prefab b/UnitySDK/Assets/ML-Agents/Examples/Soccer/Prefabs/SoccerFieldTwos.prefab index f94f5f6319..bacae626c9 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/Soccer/Prefabs/SoccerFieldTwos.prefab +++ b/UnitySDK/Assets/ML-Agents/Examples/Soccer/Prefabs/SoccerFieldTwos.prefab @@ -87,6 +87,7 @@ GameObject: - component: {fileID: 4277721046484044} - component: {fileID: 54348679551516588} - component: {fileID: 135232974003521068} + - component: {fileID: 114734187185382186} - component: {fileID: 114492261207303438} - 
component: {fileID: 114692966630797794} m_Layer: 13 @@ -177,6 +178,7 @@ GameObject: - component: {fileID: 4485793831109164} - component: {fileID: 54250052574815742} - component: {fileID: 135154818167532598} + - component: {fileID: 114105115387635628} - component: {fileID: 114698199869072806} - component: {fileID: 114381244552195858} m_Layer: 11 @@ -196,6 +198,7 @@ GameObject: - component: {fileID: 4444285537983296} - component: {fileID: 54609996481602788} - component: {fileID: 135208952479003512} + - component: {fileID: 114387866097048300} - component: {fileID: 114850431417842684} - component: {fileID: 114965771318032104} m_Layer: 13 @@ -761,6 +764,7 @@ GameObject: - component: {fileID: 4002186104597906} - component: {fileID: 54629836435839708} - component: {fileID: 135133947297127334} + - component: {fileID: 114529615399004778} - component: {fileID: 114284769194328828} - component: {fileID: 114724674330921748} m_Layer: 11 @@ -3505,6 +3509,27 @@ BoxCollider: serializedVersion: 2 m_Size: {x: 30, y: 0.1, z: 16} m_Center: {x: 0, y: 0, z: 0} +--- !u!114 &114105115387635628 +MonoBehaviour: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1124213441168130} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} + m_Name: + m_EditorClassIdentifier: + m_BrainParameters: + vectorObservationSize: 112 + numStackedVectorObservations: 1 + vectorActionSize: 05000000 + vectorActionDescriptions: [] + vectorActionSpaceType: 0 + m_Model: {fileID: 0} + m_InferenceDevice: 0 + m_UseHeuristic: 0 + m_BehaviorName: Goalie --- !u!114 &114273807544954564 MonoBehaviour: m_ObjectHideFlags: 1 @@ -3532,10 +3557,7 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 2a2688ef4a36349f9aa010020c32d198, type: 3} m_Name: m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: 090fa5a8588f5433bb7f878e6f5ac954, type: 2} agentParameters: - agentCameras: [] - 
agentRenderTextures: [] maxStep: 3000 resetOnDone: 1 onDemandDecision: 0 @@ -3555,6 +3577,27 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: bb172294dbbcc408286b156a2c4b553c, type: 3} m_Name: m_EditorClassIdentifier: +--- !u!114 &114387866097048300 +MonoBehaviour: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1131626411948014} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} + m_Name: + m_EditorClassIdentifier: + m_BrainParameters: + vectorObservationSize: 112 + numStackedVectorObservations: 1 + vectorActionSize: 07000000 + vectorActionDescriptions: [] + vectorActionSpaceType: 0 + m_Model: {fileID: 0} + m_InferenceDevice: 0 + m_UseHeuristic: 0 + m_BehaviorName: Striker --- !u!114 &114492261207303438 MonoBehaviour: m_ObjectHideFlags: 1 @@ -3566,10 +3609,7 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 2a2688ef4a36349f9aa010020c32d198, type: 3} m_Name: m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: 29ed78b3e8fef4340b3a1f6954b88f18, type: 2} agentParameters: - agentCameras: [] - agentRenderTextures: [] maxStep: 3000 resetOnDone: 1 onDemandDecision: 0 @@ -3578,6 +3618,27 @@ MonoBehaviour: agentRole: 0 area: {fileID: 114559182131992928} agentRb: {fileID: 0} +--- !u!114 &114529615399004778 +MonoBehaviour: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1890219402901316} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} + m_Name: + m_EditorClassIdentifier: + m_BrainParameters: + vectorObservationSize: 112 + numStackedVectorObservations: 1 + vectorActionSize: 05000000 + vectorActionDescriptions: [] + vectorActionSpaceType: 0 + m_Model: {fileID: 0} + m_InferenceDevice: 0 + m_UseHeuristic: 0 + m_BehaviorName: Goalie --- !u!114 &114559182131992928 MonoBehaviour: 
m_ObjectHideFlags: 1 @@ -3619,10 +3680,7 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 2a2688ef4a36349f9aa010020c32d198, type: 3} m_Name: m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: 090fa5a8588f5433bb7f878e6f5ac954, type: 2} agentParameters: - agentCameras: [] - agentRenderTextures: [] maxStep: 3000 resetOnDone: 1 onDemandDecision: 0 @@ -3642,6 +3700,27 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: bb172294dbbcc408286b156a2c4b553c, type: 3} m_Name: m_EditorClassIdentifier: +--- !u!114 &114734187185382186 +MonoBehaviour: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1095606497496374} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} + m_Name: + m_EditorClassIdentifier: + m_BrainParameters: + vectorObservationSize: 112 + numStackedVectorObservations: 1 + vectorActionSize: 07000000 + vectorActionDescriptions: [] + vectorActionSpaceType: 0 + m_Model: {fileID: 0} + m_InferenceDevice: 0 + m_UseHeuristic: 0 + m_BehaviorName: Striker --- !u!114 &114850431417842684 MonoBehaviour: m_ObjectHideFlags: 1 @@ -3653,10 +3732,7 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 2a2688ef4a36349f9aa010020c32d198, type: 3} m_Name: m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: 29ed78b3e8fef4340b3a1f6954b88f18, type: 2} agentParameters: - agentCameras: [] - agentRenderTextures: [] maxStep: 3000 resetOnDone: 1 onDemandDecision: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Soccer/Scenes/SoccerTwos.unity b/UnitySDK/Assets/ML-Agents/Examples/Soccer/Scenes/SoccerTwos.unity index 8b8f61fcc2..0f11f9a755 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/Soccer/Scenes/SoccerTwos.unity +++ b/UnitySDK/Assets/ML-Agents/Examples/Soccer/Scenes/SoccerTwos.unity @@ -657,11 +657,9 @@ MonoBehaviour: m_Name: m_EditorClassIdentifier: broadcastHub: - broadcastingBrains: - - {fileID: 11400000, guid: 
29ed78b3e8fef4340b3a1f6954b88f18, type: 2} + brainsToControl: - {fileID: 11400000, guid: 090fa5a8588f5433bb7f878e6f5ac954, type: 2} - m_BrainsToControl: [] - m_MaxSteps: 0 + - {fileID: 11400000, guid: 29ed78b3e8fef4340b3a1f6954b88f18, type: 2} m_TrainingConfiguration: width: 800 height: 500 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Template/Scripts/TemplateDecision.cs b/UnitySDK/Assets/ML-Agents/Examples/Template/Scripts/TemplateDecision.cs deleted file mode 100644 index 371f6cbe98..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Template/Scripts/TemplateDecision.cs +++ /dev/null @@ -1,26 +0,0 @@ -using System.Collections.Generic; -using UnityEngine; -using MLAgents; - -public class TemplateDecision : Decision -{ - public override float[] Decide( - List vectorObs, - List visualObs, - float reward, - bool done, - List memory) - { - return new float[0]; - } - - public override List MakeMemory( - List vectorObs, - List visualObs, - float reward, - bool done, - List memory) - { - return new List(); - } -} diff --git a/UnitySDK/Assets/ML-Agents/Examples/Template/Scripts/TemplateDecision.cs.meta b/UnitySDK/Assets/ML-Agents/Examples/Template/Scripts/TemplateDecision.cs.meta deleted file mode 100755 index 6e4811e181..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Template/Scripts/TemplateDecision.cs.meta +++ /dev/null @@ -1,12 +0,0 @@ -fileFormatVersion: 2 -guid: 3812a852e739e44d7ab2ad777eeb0212 -timeCreated: 1503355437 -licenseType: Free -MonoImporter: - serializedVersion: 2 - defaultReferences: [] - executionOrder: 0 - icon: {instanceID: 0} - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Tennis/Brains.meta b/UnitySDK/Assets/ML-Agents/Examples/Tennis/Brains.meta deleted file mode 100644 index ef8faca612..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Tennis/Brains.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: d4c3de86e5fde4fdcae2892ed75045c5 -folderAsset: yes -DefaultImporter: - 
externalObjects: {} - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Tennis/Brains/TennisLearning.asset b/UnitySDK/Assets/ML-Agents/Examples/Tennis/Brains/TennisLearning.asset deleted file mode 100644 index 26cdaf2605..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Tennis/Brains/TennisLearning.asset +++ /dev/null @@ -1,24 +0,0 @@ -%YAML 1.1 -%TAG !u! tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 8b23992c8eb17439887f5e944bf04a40, type: 3} - m_Name: TennisLearning - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 8 - numStackedVectorObservations: 3 - vectorActionSize: 02000000 - cameraResolutions: [] - vectorActionDescriptions: - - - - - vectorActionSpaceType: 1 - model: {fileID: 11400000, guid: c85010de4f32e4c88bac16d9688aaadc, type: 3} - inferenceDevice: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Tennis/Brains/TennisLearning.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/Tennis/Brains/TennisLearning.asset.meta deleted file mode 100644 index 9699168110..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Tennis/Brains/TennisLearning.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 1674996276be448c2ad51fb139e21e05 -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Tennis/Brains/TennisPlayer.asset b/UnitySDK/Assets/ML-Agents/Examples/Tennis/Brains/TennisPlayer.asset deleted file mode 100644 index 577bb7010b..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Tennis/Brains/TennisPlayer.asset +++ /dev/null @@ -1,34 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 41e9bda8f3cf1492fa74926a530f6f70, type: 3} - m_Name: TennisPlayer - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 8 - numStackedVectorObservations: 3 - vectorActionSize: 02000000 - cameraResolutions: [] - vectorActionDescriptions: - - - - - vectorActionSpaceType: 1 - keyContinuousPlayerActions: - - key: 97 - index: 0 - value: -1 - - key: 100 - index: 0 - value: 1 - - key: 119 - index: 1 - value: 1 - axisContinuousPlayerActions: [] - discretePlayerActions: [] diff --git a/UnitySDK/Assets/ML-Agents/Examples/Tennis/Brains/TennisPlayer.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/Tennis/Brains/TennisPlayer.asset.meta deleted file mode 100644 index b821426564..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Tennis/Brains/TennisPlayer.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 6bf6a586a645b471bb9bd1194ae0e229 -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Tennis/Prefabs/TennisArea.prefab b/UnitySDK/Assets/ML-Agents/Examples/Tennis/Prefabs/TennisArea.prefab index 4acb747f66..5e72a59945 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/Tennis/Prefabs/TennisArea.prefab +++ b/UnitySDK/Assets/ML-Agents/Examples/Tennis/Prefabs/TennisArea.prefab @@ -40,6 +40,7 @@ GameObject: - component: {fileID: 23050935508163814} - component: {fileID: 54815576193067388} - component: {fileID: 65276341973995358} + - component: {fileID: 114176423636690854} - component: {fileID: 114915946461826994} m_Layer: 0 m_Name: AgentA @@ -297,6 +298,7 @@ GameObject: - component: {fileID: 23268445935516234} - component: {fileID: 54459681652844648} - component: {fileID: 
65280384434867516} + - component: {fileID: 114399072728845634} - component: {fileID: 114800310164848628} m_Layer: 0 m_Name: AgentB @@ -1177,6 +1179,48 @@ BoxCollider: serializedVersion: 2 m_Size: {x: 0.5, y: 8, z: 11} m_Center: {x: 0, y: 0, z: 0} +--- !u!114 &114176423636690854 +MonoBehaviour: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1170495812642400} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} + m_Name: + m_EditorClassIdentifier: + m_BrainParameters: + vectorObservationSize: 8 + numStackedVectorObservations: 3 + vectorActionSize: 02000000 + vectorActionDescriptions: [] + vectorActionSpaceType: 1 + m_Model: {fileID: 11400000, guid: d6c5e749e4ceb4cf79640a5955706d3d, type: 3} + m_InferenceDevice: 0 + m_UseHeuristic: 0 + m_BehaviorName: Tennis +--- !u!114 &114399072728845634 +MonoBehaviour: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1882383181950958} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} + m_Name: + m_EditorClassIdentifier: + m_BrainParameters: + vectorObservationSize: 8 + numStackedVectorObservations: 3 + vectorActionSize: 02000000 + vectorActionDescriptions: [] + vectorActionSpaceType: 1 + m_Model: {fileID: 11400000, guid: d6c5e749e4ceb4cf79640a5955706d3d, type: 3} + m_InferenceDevice: 0 + m_UseHeuristic: 0 + m_BehaviorName: Tennis --- !u!114 &114800310164848628 MonoBehaviour: m_ObjectHideFlags: 1 @@ -1188,10 +1232,7 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: e51a3fb0b3186433ea84fc1e0549cc91, type: 3} m_Name: m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: 1674996276be448c2ad51fb139e21e05, type: 2} agentParameters: - agentCameras: [] - agentRenderTextures: [] maxStep: 5000 resetOnDone: 1 onDemandDecision: 0 @@ -1226,10 +1267,7 @@ 
MonoBehaviour: m_Script: {fileID: 11500000, guid: e51a3fb0b3186433ea84fc1e0549cc91, type: 3} m_Name: m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: 1674996276be448c2ad51fb139e21e05, type: 2} agentParameters: - agentCameras: [] - agentRenderTextures: [] maxStep: 5000 resetOnDone: 1 onDemandDecision: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Tennis/Scenes/Tennis.unity b/UnitySDK/Assets/ML-Agents/Examples/Tennis/Scenes/Tennis.unity index 91c60d7a47..6bdd2ee727 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/Tennis/Scenes/Tennis.unity +++ b/UnitySDK/Assets/ML-Agents/Examples/Tennis/Scenes/Tennis.unity @@ -731,10 +731,8 @@ MonoBehaviour: m_Name: m_EditorClassIdentifier: broadcastHub: - broadcastingBrains: + brainsToControl: - {fileID: 11400000, guid: 1674996276be448c2ad51fb139e21e05, type: 2} - m_BrainsToControl: [] - m_MaxSteps: 25000 m_TrainingConfiguration: width: 300 height: 200 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Tennis/Scenes/TennisIL.unity b/UnitySDK/Assets/ML-Agents/Examples/Tennis/Scenes/TennisIL.unity deleted file mode 100644 index 310e115d57..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Tennis/Scenes/TennisIL.unity +++ /dev/null @@ -1,763 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!29 &1 -OcclusionCullingSettings: - m_ObjectHideFlags: 0 - serializedVersion: 2 - m_OcclusionBakeSettings: - smallestOccluder: 5 - smallestHole: 0.25 - backfaceThreshold: 100 - m_SceneGUID: 00000000000000000000000000000000 - m_OcclusionCullingData: {fileID: 0} ---- !u!104 &2 -RenderSettings: - m_ObjectHideFlags: 0 - serializedVersion: 8 - m_Fog: 0 - m_FogColor: {r: 0.5, g: 0.5, b: 0.5, a: 1} - m_FogMode: 3 - m_FogDensity: 0.01 - m_LinearFogStart: 0 - m_LinearFogEnd: 300 - m_AmbientSkyColor: {r: 0.8, g: 0.8, b: 0.8, a: 1} - m_AmbientEquatorColor: {r: 0.114, g: 0.125, b: 0.133, a: 1} - m_AmbientGroundColor: {r: 0.047, g: 0.043, b: 0.035, a: 1} - m_AmbientIntensity: 1 - m_AmbientMode: 3 - m_SubtractiveShadowColor: {r: 0.42, g: 0.478, b: 0.627, a: 1} - m_SkyboxMaterial: {fileID: 10304, guid: 0000000000000000f000000000000000, type: 0} - m_HaloStrength: 0.5 - m_FlareStrength: 1 - m_FlareFadeSpeed: 3 - m_HaloTexture: {fileID: 0} - m_SpotCookie: {fileID: 10001, guid: 0000000000000000e000000000000000, type: 0} - m_DefaultReflectionMode: 0 - m_DefaultReflectionResolution: 128 - m_ReflectionBounces: 1 - m_ReflectionIntensity: 1 - m_CustomReflection: {fileID: 0} - m_Sun: {fileID: 0} - m_IndirectSpecularColor: {r: 0, g: 0, b: 0, a: 1} ---- !u!157 &3 -LightmapSettings: - m_ObjectHideFlags: 0 - serializedVersion: 11 - m_GIWorkflowMode: 1 - m_GISettings: - serializedVersion: 2 - m_BounceScale: 1 - m_IndirectOutputScale: 1 - m_AlbedoBoost: 1 - m_TemporalCoherenceThreshold: 1 - m_EnvironmentLightingMode: 0 - m_EnableBakedLightmaps: 1 - m_EnableRealtimeLightmaps: 1 - m_LightmapEditorSettings: - serializedVersion: 9 - m_Resolution: 2 - m_BakeResolution: 40 - m_TextureWidth: 1024 - m_TextureHeight: 1024 - m_AO: 0 - m_AOMaxDistance: 1 - m_CompAOExponent: 1 - m_CompAOExponentDirect: 0 - m_Padding: 2 - m_LightmapParameters: {fileID: 0} - m_LightmapsBakeMode: 1 - m_TextureCompression: 1 - m_FinalGather: 0 - m_FinalGatherFiltering: 1 - m_FinalGatherRayCount: 256 
- m_ReflectionCompression: 2 - m_MixedBakeMode: 2 - m_BakeBackend: 0 - m_PVRSampling: 1 - m_PVRDirectSampleCount: 32 - m_PVRSampleCount: 500 - m_PVRBounces: 2 - m_PVRFilterTypeDirect: 0 - m_PVRFilterTypeIndirect: 0 - m_PVRFilterTypeAO: 0 - m_PVRFilteringMode: 1 - m_PVRCulling: 1 - m_PVRFilteringGaussRadiusDirect: 1 - m_PVRFilteringGaussRadiusIndirect: 5 - m_PVRFilteringGaussRadiusAO: 2 - m_PVRFilteringAtrousPositionSigmaDirect: 0.5 - m_PVRFilteringAtrousPositionSigmaIndirect: 2 - m_PVRFilteringAtrousPositionSigmaAO: 1 - m_ShowResolutionOverlay: 1 - m_LightingDataAsset: {fileID: 0} - m_UseShadowmask: 1 ---- !u!196 &4 -NavMeshSettings: - serializedVersion: 2 - m_ObjectHideFlags: 0 - m_BuildSettings: - serializedVersion: 2 - agentTypeID: 0 - agentRadius: 0.5 - agentHeight: 2 - agentSlope: 45 - agentClimb: 0.4 - ledgeDropHeight: 0 - maxJumpAcrossDistance: 0 - minRegionArea: 2 - manualCellSize: 0 - cellSize: 0.16666667 - manualTileSize: 0 - tileSize: 256 - accuratePlacement: 0 - debug: - m_Flags: 0 - m_NavMeshData: {fileID: 0} ---- !u!1001 &9042437 -Prefab: - m_ObjectHideFlags: 0 - serializedVersion: 2 - m_Modification: - m_TransformParent: {fileID: 0} - m_Modifications: - - target: {fileID: 4172342666475122, guid: 812997c7bc2544b6f927ff684c03450f, type: 2} - propertyPath: m_LocalPosition.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4172342666475122, guid: 812997c7bc2544b6f927ff684c03450f, type: 2} - propertyPath: m_LocalPosition.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4172342666475122, guid: 812997c7bc2544b6f927ff684c03450f, type: 2} - propertyPath: m_LocalPosition.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4172342666475122, guid: 812997c7bc2544b6f927ff684c03450f, type: 2} - propertyPath: m_LocalRotation.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4172342666475122, guid: 812997c7bc2544b6f927ff684c03450f, type: 2} - propertyPath: m_LocalRotation.y - value: 0 - objectReference: 
{fileID: 0} - - target: {fileID: 4172342666475122, guid: 812997c7bc2544b6f927ff684c03450f, type: 2} - propertyPath: m_LocalRotation.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 4172342666475122, guid: 812997c7bc2544b6f927ff684c03450f, type: 2} - propertyPath: m_LocalRotation.w - value: 1 - objectReference: {fileID: 0} - - target: {fileID: 4172342666475122, guid: 812997c7bc2544b6f927ff684c03450f, type: 2} - propertyPath: m_RootOrder - value: 4 - objectReference: {fileID: 0} - - target: {fileID: 1541947554534326, guid: 812997c7bc2544b6f927ff684c03450f, type: 2} - propertyPath: m_Name - value: TennisArea - objectReference: {fileID: 0} - - target: {fileID: 1170495812642400, guid: 812997c7bc2544b6f927ff684c03450f, type: 2} - propertyPath: m_Name - value: TeacherAgent - objectReference: {fileID: 0} - - target: {fileID: 1882383181950958, guid: 812997c7bc2544b6f927ff684c03450f, type: 2} - propertyPath: m_Name - value: StudentAgent - objectReference: {fileID: 0} - - target: {fileID: 114915946461826994, guid: 812997c7bc2544b6f927ff684c03450f, - type: 2} - propertyPath: brain - value: - objectReference: {fileID: 11400000, guid: 6bf6a586a645b471bb9bd1194ae0e229, - type: 2} - - target: {fileID: 23798586630434572, guid: 812997c7bc2544b6f927ff684c03450f, - type: 2} - propertyPath: m_Materials.Array.data[0] - value: - objectReference: {fileID: 2100000, guid: eaad04b0e0dec42229c9cb00a981d7ac, type: 2} - - target: {fileID: 23798586630434572, guid: 812997c7bc2544b6f927ff684c03450f, - type: 2} - propertyPath: m_Materials.Array.data[1] - value: - objectReference: {fileID: 2100000, guid: eaad04b0e0dec42229c9cb00a981d7ac, type: 2} - m_RemovedComponents: [] - m_ParentPrefab: {fileID: 100100000, guid: 812997c7bc2544b6f927ff684c03450f, type: 2} - m_IsPrefabParent: 0 ---- !u!1 &32822935 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 32822938} - - 
component: {fileID: 32822937} - - component: {fileID: 32822936} - m_Layer: 0 - m_Name: EventSystem - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!114 &32822936 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 32822935} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 1077351063, guid: f70555f144d8491a825f0804e09c671c, type: 3} - m_Name: - m_EditorClassIdentifier: - m_HorizontalAxis: Horizontal - m_VerticalAxis: Vertical - m_SubmitButton: Submit - m_CancelButton: Cancel - m_InputActionsPerSecond: 10 - m_RepeatDelay: 0.5 - m_ForceModuleActive: 0 ---- !u!114 &32822937 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 32822935} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: -619905303, guid: f70555f144d8491a825f0804e09c671c, type: 3} - m_Name: - m_EditorClassIdentifier: - m_FirstSelected: {fileID: 0} - m_sendNavigationEvents: 1 - m_DragThreshold: 5 ---- !u!4 &32822938 -Transform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 32822935} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 0} - m_RootOrder: 2 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!1 &957430531 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 957430536} - - component: {fileID: 957430535} - m_Layer: 0 - m_Name: Main Camera - m_TagString: MainCamera - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!20 &957430535 -Camera: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - 
m_GameObject: {fileID: 957430531} - m_Enabled: 1 - serializedVersion: 2 - m_ClearFlags: 2 - m_BackGroundColor: {r: 0.46666667, g: 0.5647059, b: 0.60784316, a: 1} - m_NormalizedViewPortRect: - serializedVersion: 2 - x: 0 - y: 0 - width: 1 - height: 1 - near clip plane: 0.3 - far clip plane: 1000 - field of view: 50.8 - orthographic: 0 - orthographic size: 5 - m_Depth: -1 - m_CullingMask: - serializedVersion: 2 - m_Bits: 4294967295 - m_RenderingPath: -1 - m_TargetTexture: {fileID: 0} - m_TargetDisplay: 0 - m_TargetEye: 3 - m_HDR: 1 - m_AllowMSAA: 1 - m_AllowDynamicResolution: 0 - m_ForceIntoRT: 0 - m_OcclusionCulling: 1 - m_StereoConvergence: 10 - m_StereoSeparation: 0.022 ---- !u!4 &957430536 -Transform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 957430531} - m_LocalRotation: {x: 0.27689537, y: 0, z: 0, w: 0.9609001} - m_LocalPosition: {x: 0, y: 6.25, z: -20} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 0} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 32.15, y: 0, z: 0} ---- !u!1 &1022397856 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 1022397857} - - component: {fileID: 1022397858} - m_Layer: 0 - m_Name: Academy - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!4 &1022397857 -Transform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1022397856} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: -2.5590992, y: 4.387929, z: 6.622064} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 0} - m_RootOrder: 3 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} ---- !u!114 &1022397858 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - 
m_GameObject: {fileID: 1022397856} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: f1aadf59c24464a9fb5b4b3a2190c972, type: 3} - m_Name: - m_EditorClassIdentifier: - broadcastHub: - broadcastingBrains: - - {fileID: 11400000, guid: 1674996276be448c2ad51fb139e21e05, type: 2} - - {fileID: 11400000, guid: 6bf6a586a645b471bb9bd1194ae0e229, type: 2} - m_BrainsToControl: [] - m_MaxSteps: 25000 - m_TrainingConfiguration: - width: 300 - height: 200 - qualityLevel: 0 - timeScale: 100 - targetFrameRate: 60 - m_InferenceConfiguration: - width: 1280 - height: 720 - qualityLevel: 5 - timeScale: 1 - targetFrameRate: 60 - resetParameters: - m_ResetParameters: - - key: angle - value: 55 - - key: scale - value: 1 - - key: gravity - value: 9.81 ---- !u!1 &1184319689 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 1184319693} - - component: {fileID: 1184319692} - - component: {fileID: 1184319691} - - component: {fileID: 1184319690} - m_Layer: 5 - m_Name: Canvas - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!114 &1184319690 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1184319689} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 1301386320, guid: f70555f144d8491a825f0804e09c671c, type: 3} - m_Name: - m_EditorClassIdentifier: - m_IgnoreReversedGraphics: 1 - m_BlockingObjects: 0 - m_BlockingMask: - serializedVersion: 2 - m_Bits: 4294967295 ---- !u!114 &1184319691 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1184319689} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 1980459831, guid: f70555f144d8491a825f0804e09c671c, type: 3} - m_Name: - m_EditorClassIdentifier: - m_UiScaleMode: 0 - 
m_ReferencePixelsPerUnit: 100 - m_ScaleFactor: 1 - m_ReferenceResolution: {x: 800, y: 600} - m_ScreenMatchMode: 0 - m_MatchWidthOrHeight: 0 - m_PhysicalUnit: 3 - m_FallbackScreenDPI: 96 - m_DefaultSpriteDPI: 96 - m_DynamicPixelsPerUnit: 1 ---- !u!223 &1184319692 -Canvas: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1184319689} - m_Enabled: 1 - serializedVersion: 3 - m_RenderMode: 0 - m_Camera: {fileID: 0} - m_PlaneDistance: 100 - m_PixelPerfect: 0 - m_ReceivesEvents: 1 - m_OverrideSorting: 0 - m_OverridePixelPerfect: 0 - m_SortingBucketNormalizedSize: 0 - m_AdditionalShaderChannelsFlag: 0 - m_SortingLayerID: 0 - m_SortingOrder: 0 - m_TargetDisplay: 0 ---- !u!224 &1184319693 -RectTransform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1184319689} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 0, y: 0, z: 0} - m_Children: - - {fileID: 2073469451} - - {fileID: 1871669622} - m_Father: {fileID: 0} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} - m_AnchorMin: {x: 0, y: 0} - m_AnchorMax: {x: 0, y: 0} - m_AnchoredPosition: {x: 0, y: 0} - m_SizeDelta: {x: 0, y: 0} - m_Pivot: {x: 0, y: 0} ---- !u!1001 &1593222347 -Prefab: - m_ObjectHideFlags: 0 - serializedVersion: 2 - m_Modification: - m_TransformParent: {fileID: 0} - m_Modifications: - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalPosition.x - value: 106.38621 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalPosition.y - value: 38.840767 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalPosition.z - value: 34.72934 - objectReference: {fileID: 0} - - target: {fileID: 
4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalRotation.x - value: 0.31598538 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalRotation.y - value: -0.3596048 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalRotation.z - value: 0.13088542 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_LocalRotation.w - value: 0.8681629 - objectReference: {fileID: 0} - - target: {fileID: 4943719350691982, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - propertyPath: m_RootOrder - value: 5 - objectReference: {fileID: 0} - m_RemovedComponents: [] - m_ParentPrefab: {fileID: 100100000, guid: 5889392e3f05b448a8a06c5def6c2dec, type: 2} - m_IsPrefabParent: 0 ---- !u!1001 &1755795249 -Prefab: - m_ObjectHideFlags: 0 - serializedVersion: 2 - m_Modification: - m_TransformParent: {fileID: 0} - m_Modifications: - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalPosition.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalPosition.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalPosition.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalRotation.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalRotation.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 
3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalRotation.z - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_LocalRotation.w - value: 1 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_RootOrder - value: 6 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_AnchoredPosition.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_AnchoredPosition.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_SizeDelta.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_SizeDelta.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_AnchorMin.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_AnchorMin.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_AnchorMax.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_AnchorMax.y - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_Pivot.x - value: 0 - objectReference: {fileID: 0} - - target: {fileID: 224194346362733190, guid: 
3ce107b4a79bc4eef83afde434932a68, - type: 2} - propertyPath: m_Pivot.y - value: 0 - objectReference: {fileID: 0} - m_RemovedComponents: [] - m_ParentPrefab: {fileID: 100100000, guid: 3ce107b4a79bc4eef83afde434932a68, type: 2} - m_IsPrefabParent: 0 ---- !u!1 &1871669621 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 1871669622} - - component: {fileID: 1871669624} - - component: {fileID: 1871669623} - m_Layer: 5 - m_Name: ScoreB - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!224 &1871669622 -RectTransform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1871669621} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 1184319693} - m_RootOrder: 1 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} - m_AnchorMin: {x: 1, y: 1} - m_AnchorMax: {x: 1, y: 1} - m_AnchoredPosition: {x: -20, y: -50} - m_SizeDelta: {x: 100, y: 50} - m_Pivot: {x: 0.5, y: 0.5} ---- !u!114 &1871669623 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1871669621} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 708705254, guid: f70555f144d8491a825f0804e09c671c, type: 3} - m_Name: - m_EditorClassIdentifier: - m_Material: {fileID: 2100000, guid: 58e2b2715aaee4686a912897f823f8f5, type: 2} - m_Color: {r: 1, g: 1, b: 1, a: 1} - m_RaycastTarget: 1 - m_OnCullStateChanged: - m_PersistentCalls: - m_Calls: [] - m_TypeName: UnityEngine.UI.MaskableGraphic+CullStateChangedEvent, UnityEngine.UI, - Version=1.0.0.0, Culture=neutral, PublicKeyToken=null - m_FontData: - m_Font: {fileID: 10102, guid: 0000000000000000e000000000000000, type: 0} - m_FontSize: 40 - m_FontStyle: 0 - 
m_BestFit: 0 - m_MinSize: 2 - m_MaxSize: 40 - m_Alignment: 0 - m_AlignByGeometry: 0 - m_RichText: 1 - m_HorizontalOverflow: 0 - m_VerticalOverflow: 0 - m_LineSpacing: 1 - m_Text: 0 ---- !u!222 &1871669624 -CanvasRenderer: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 1871669621} ---- !u!1 &2073469450 -GameObject: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - serializedVersion: 5 - m_Component: - - component: {fileID: 2073469451} - - component: {fileID: 2073469453} - - component: {fileID: 2073469452} - m_Layer: 5 - m_Name: ScoreA - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 ---- !u!224 &2073469451 -RectTransform: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 2073469450} - m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 1184319693} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} - m_AnchorMin: {x: 0, y: 1} - m_AnchorMax: {x: 0, y: 1} - m_AnchoredPosition: {x: 100, y: -50} - m_SizeDelta: {x: 100, y: 50} - m_Pivot: {x: 0.5, y: 0.5} ---- !u!114 &2073469452 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 2073469450} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 708705254, guid: f70555f144d8491a825f0804e09c671c, type: 3} - m_Name: - m_EditorClassIdentifier: - m_Material: {fileID: 2100000, guid: 58e2b2715aaee4686a912897f823f8f5, type: 2} - m_Color: {r: 1, g: 1, b: 1, a: 1} - m_RaycastTarget: 1 - m_OnCullStateChanged: - m_PersistentCalls: - m_Calls: [] - m_TypeName: UnityEngine.UI.MaskableGraphic+CullStateChangedEvent, UnityEngine.UI, - Version=1.0.0.0, Culture=neutral, PublicKeyToken=null - m_FontData: - m_Font: 
{fileID: 10102, guid: 0000000000000000e000000000000000, type: 0} - m_FontSize: 40 - m_FontStyle: 0 - m_BestFit: 0 - m_MinSize: 2 - m_MaxSize: 40 - m_Alignment: 0 - m_AlignByGeometry: 0 - m_RichText: 1 - m_HorizontalOverflow: 0 - m_VerticalOverflow: 0 - m_LineSpacing: 1 - m_Text: 0 ---- !u!222 &2073469453 -CanvasRenderer: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 2073469450} diff --git a/UnitySDK/Assets/ML-Agents/Examples/Tennis/Scenes/TennisIL.unity.meta b/UnitySDK/Assets/ML-Agents/Examples/Tennis/Scenes/TennisIL.unity.meta deleted file mode 100644 index 87239bedd6..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Tennis/Scenes/TennisIL.unity.meta +++ /dev/null @@ -1,7 +0,0 @@ -fileFormatVersion: 2 -guid: fcb7318b9c85c40a3b186ed8a3857f0a -DefaultImporter: - externalObjects: {} - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Tennis/Scripts/TennisAgent.cs b/UnitySDK/Assets/ML-Agents/Examples/Tennis/Scripts/TennisAgent.cs index 5178138e17..206a5e83fe 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/Tennis/Scripts/TennisAgent.cs +++ b/UnitySDK/Assets/ML-Agents/Examples/Tennis/Scripts/TennisAgent.cs @@ -80,6 +80,15 @@ public override void AgentAction(float[] vectorAction, string textAction) m_TextComponent.text = score.ToString(); } + public override float[] Heuristic() + { + var action = new float[2]; + + action[0] = Input.GetAxis("Horizontal"); + action[1] = Input.GetKey(KeyCode.Space) ? 1f : 0f; + return action; + } + public override void AgentReset() { m_InvertMult = invertX ? 
-1f : 1f; diff --git a/UnitySDK/Assets/ML-Agents/Examples/Tennis/TFModels/Tennis.nn b/UnitySDK/Assets/ML-Agents/Examples/Tennis/TFModels/Tennis.nn new file mode 100644 index 0000000000..53807f719a Binary files /dev/null and b/UnitySDK/Assets/ML-Agents/Examples/Tennis/TFModels/Tennis.nn differ diff --git a/UnitySDK/Assets/ML-Agents/Examples/Tennis/TFModels/Tennis.nn.meta b/UnitySDK/Assets/ML-Agents/Examples/Tennis/TFModels/Tennis.nn.meta new file mode 100644 index 0000000000..4747e20af5 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Examples/Tennis/TFModels/Tennis.nn.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: d6c5e749e4ceb4cf79640a5955706d3d +ScriptedImporter: + userData: + assetBundleName: + assetBundleVariant: + script: {fileID: 11500000, guid: 19ed1486aa27d4903b34839f37b8f69f, type: 3} diff --git a/UnitySDK/Assets/ML-Agents/Examples/Tennis/TFModels/TennisLearning.nn b/UnitySDK/Assets/ML-Agents/Examples/Tennis/TFModels/TennisLearning.nn deleted file mode 100644 index 337353d9f8..0000000000 Binary files a/UnitySDK/Assets/ML-Agents/Examples/Tennis/TFModels/TennisLearning.nn and /dev/null differ diff --git a/UnitySDK/Assets/ML-Agents/Examples/Tennis/TFModels/TennisLearning.nn.meta b/UnitySDK/Assets/ML-Agents/Examples/Tennis/TFModels/TennisLearning.nn.meta deleted file mode 100644 index 987174e07d..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Tennis/TFModels/TennisLearning.nn.meta +++ /dev/null @@ -1,7 +0,0 @@ -fileFormatVersion: 2 -guid: c85010de4f32e4c88bac16d9688aaadc -ScriptedImporter: - userData: - assetBundleName: - assetBundleVariant: - script: {fileID: 11500000, guid: 19ed1486aa27d4903b34839f37b8f69f, type: 3} diff --git a/UnitySDK/Assets/ML-Agents/Examples/Walker/Brains.meta b/UnitySDK/Assets/ML-Agents/Examples/Walker/Brains.meta deleted file mode 100644 index 972d158b9b..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Walker/Brains.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 23626aef723764308a863be40cbcedf3 -folderAsset: 
yes -DefaultImporter: - externalObjects: {} - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Walker/Brains/WalkerLearning.asset b/UnitySDK/Assets/ML-Agents/Examples/Walker/Brains/WalkerLearning.asset deleted file mode 100644 index 216e6cbafc..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Walker/Brains/WalkerLearning.asset +++ /dev/null @@ -1,61 +0,0 @@ -%YAML 1.1 -%TAG !u! tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 8b23992c8eb17439887f5e944bf04a40, type: 3} - m_Name: WalkerLearning - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 212 - numStackedVectorObservations: 1 - vectorActionSize: 27000000 - cameraResolutions: [] - vectorActionDescriptions: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - vectorActionSpaceType: 1 - model: {fileID: 11400000, guid: 693a2a44fd7c64d3ca80d7444f782520, type: 3} - inferenceDevice: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Walker/Brains/WalkerLearning.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/Walker/Brains/WalkerLearning.asset.meta deleted file mode 100644 index a7c1262ae7..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Walker/Brains/WalkerLearning.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 3541a9a488cf54088a4526cff85512cc -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/Walker/Prefabs/WalkerPair.prefab b/UnitySDK/Assets/ML-Agents/Examples/Walker/Prefabs/WalkerPair.prefab index cfc155460a..52ff823795 100644 --- 
a/UnitySDK/Assets/ML-Agents/Examples/Walker/Prefabs/WalkerPair.prefab +++ b/UnitySDK/Assets/ML-Agents/Examples/Walker/Prefabs/WalkerPair.prefab @@ -469,6 +469,7 @@ GameObject: serializedVersion: 5 m_Component: - component: {fileID: 4821824385666130} + - component: {fileID: 114052351078996708} - component: {fileID: 114363722412740164} - component: {fileID: 114614375190687060} m_Layer: 0 @@ -2309,6 +2310,27 @@ BoxCollider: serializedVersion: 2 m_Size: {x: 1, y: 1, z: 1} m_Center: {x: 0, y: 0, z: 0} +--- !u!114 &114052351078996708 +MonoBehaviour: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1800913799254612} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} + m_Name: + m_EditorClassIdentifier: + m_BrainParameters: + vectorObservationSize: 212 + numStackedVectorObservations: 1 + vectorActionSize: 27000000 + vectorActionDescriptions: [] + vectorActionSpaceType: 1 + m_Model: {fileID: 11400000, guid: 4e86a19e012da43bfa5ab97ae8089b98, type: 3} + m_InferenceDevice: 0 + m_UseHeuristic: 0 + m_BehaviorName: Walker --- !u!114 &114110225517277148 MonoBehaviour: m_ObjectHideFlags: 1 @@ -2416,10 +2438,7 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: ccb0f85f0009540d7ad997952e2aed7b, type: 3} m_Name: m_EditorClassIdentifier: - brain: {fileID: 11400000, guid: 3541a9a488cf54088a4526cff85512cc, type: 2} agentParameters: - agentCameras: [] - agentRenderTextures: [] maxStep: 5000 resetOnDone: 1 onDemandDecision: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Walker/Scenes/Walker.unity b/UnitySDK/Assets/ML-Agents/Examples/Walker/Scenes/Walker.unity index abe361053c..b68663f1bc 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/Walker/Scenes/Walker.unity +++ b/UnitySDK/Assets/ML-Agents/Examples/Walker/Scenes/Walker.unity @@ -789,10 +789,8 @@ MonoBehaviour: m_Name: m_EditorClassIdentifier: broadcastHub: - broadcastingBrains: + 
brainsToControl: - {fileID: 11400000, guid: 3541a9a488cf54088a4526cff85512cc, type: 2} - m_BrainsToControl: [] - m_MaxSteps: 0 m_TrainingConfiguration: width: 80 height: 80 diff --git a/UnitySDK/Assets/ML-Agents/Examples/Walker/TFModels/Walker.nn b/UnitySDK/Assets/ML-Agents/Examples/Walker/TFModels/Walker.nn new file mode 100644 index 0000000000..2d115a1d71 Binary files /dev/null and b/UnitySDK/Assets/ML-Agents/Examples/Walker/TFModels/Walker.nn differ diff --git a/UnitySDK/Assets/ML-Agents/Examples/Walker/TFModels/Walker.nn.meta b/UnitySDK/Assets/ML-Agents/Examples/Walker/TFModels/Walker.nn.meta new file mode 100644 index 0000000000..f1806eff39 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Examples/Walker/TFModels/Walker.nn.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: 4e86a19e012da43bfa5ab97ae8089b98 +ScriptedImporter: + userData: + assetBundleName: + assetBundleVariant: + script: {fileID: 11500000, guid: 19ed1486aa27d4903b34839f37b8f69f, type: 3} diff --git a/UnitySDK/Assets/ML-Agents/Examples/Walker/TFModels/WalkerLearning.nn b/UnitySDK/Assets/ML-Agents/Examples/Walker/TFModels/WalkerLearning.nn deleted file mode 100644 index b29073a934..0000000000 Binary files a/UnitySDK/Assets/ML-Agents/Examples/Walker/TFModels/WalkerLearning.nn and /dev/null differ diff --git a/UnitySDK/Assets/ML-Agents/Examples/Walker/TFModels/WalkerLearning.nn.meta b/UnitySDK/Assets/ML-Agents/Examples/Walker/TFModels/WalkerLearning.nn.meta deleted file mode 100644 index 683c7eaa03..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/Walker/TFModels/WalkerLearning.nn.meta +++ /dev/null @@ -1,7 +0,0 @@ -fileFormatVersion: 2 -guid: 693a2a44fd7c64d3ca80d7444f782520 -ScriptedImporter: - userData: - assetBundleName: - assetBundleVariant: - script: {fileID: 11500000, guid: 19ed1486aa27d4903b34839f37b8f69f, type: 3} diff --git a/UnitySDK/Assets/ML-Agents/Examples/WallJump/Brains.meta b/UnitySDK/Assets/ML-Agents/Examples/WallJump/Brains.meta deleted file mode 100644 index 9556296525..0000000000 
--- a/UnitySDK/Assets/ML-Agents/Examples/WallJump/Brains.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: e430c8893645d431caa0a0d943145e25 -folderAsset: yes -DefaultImporter: - externalObjects: {} - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/WallJump/Brains/BigWallJumpLearning.asset b/UnitySDK/Assets/ML-Agents/Examples/WallJump/Brains/BigWallJumpLearning.asset deleted file mode 100644 index a98d5fbca5..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/WallJump/Brains/BigWallJumpLearning.asset +++ /dev/null @@ -1,26 +0,0 @@ -%YAML 1.1 -%TAG !u! tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 8b23992c8eb17439887f5e944bf04a40, type: 3} - m_Name: BigWallJumpLearning - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 74 - numStackedVectorObservations: 6 - vectorActionSize: 03000000030000000300000002000000 - cameraResolutions: [] - vectorActionDescriptions: - - - - - - - - - vectorActionSpaceType: 0 - model: {fileID: 11400000, guid: b036370dc05b9481bbcee7db40d40b5d, type: 3} - inferenceDevice: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/WallJump/Brains/BigWallJumpLearning.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/WallJump/Brains/BigWallJumpLearning.asset.meta deleted file mode 100644 index 1fcc3a20e6..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/WallJump/Brains/BigWallJumpLearning.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: b5f530c5bf8d64bf8a18df92e283bb9c -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/WallJump/Brains/SmallWallJumpLearning.asset 
b/UnitySDK/Assets/ML-Agents/Examples/WallJump/Brains/SmallWallJumpLearning.asset deleted file mode 100644 index 97a92fbd6f..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/WallJump/Brains/SmallWallJumpLearning.asset +++ /dev/null @@ -1,26 +0,0 @@ -%YAML 1.1 -%TAG !u! tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 8b23992c8eb17439887f5e944bf04a40, type: 3} - m_Name: SmallWallJumpLearning - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 74 - numStackedVectorObservations: 6 - vectorActionSize: 03000000030000000300000002000000 - cameraResolutions: [] - vectorActionDescriptions: - - - - - - - - - vectorActionSpaceType: 0 - model: {fileID: 11400000, guid: ef4a2c4f314e94d718e08c7c71b3c5f0, type: 3} - inferenceDevice: 0 diff --git a/UnitySDK/Assets/ML-Agents/Examples/WallJump/Brains/SmallWallJumpLearning.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/WallJump/Brains/SmallWallJumpLearning.asset.meta deleted file mode 100644 index c65710fd51..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/WallJump/Brains/SmallWallJumpLearning.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 2069d6ef649a549feb29054d6af8a86f -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/WallJump/Brains/WallJumpPlayer.asset b/UnitySDK/Assets/ML-Agents/Examples/WallJump/Brains/WallJumpPlayer.asset deleted file mode 100644 index 23a9f2dd9b..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/WallJump/Brains/WallJumpPlayer.asset +++ /dev/null @@ -1,42 +0,0 @@ -%YAML 1.1 -%TAG !u! 
tag:unity3d.com,2011: ---- !u!114 &11400000 -MonoBehaviour: - m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 0} - m_GameObject: {fileID: 0} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 41e9bda8f3cf1492fa74926a530f6f70, type: 3} - m_Name: WallJumpPlayer - m_EditorClassIdentifier: - brainParameters: - vectorObservationSize: 74 - numStackedVectorObservations: 6 - vectorActionSize: 03000000030000000300000002000000 - cameraResolutions: [] - vectorActionDescriptions: - - - - - - - - - vectorActionSpaceType: 0 - keyContinuousPlayerActions: [] - axisContinuousPlayerActions: [] - discretePlayerActions: - - key: 119 - branchIndex: 0 - value: 1 - - key: 115 - branchIndex: 0 - value: 2 - - key: 100 - branchIndex: 1 - value: 2 - - key: 97 - branchIndex: 1 - value: 1 - - key: 32 - branchIndex: 3 - value: 1 diff --git a/UnitySDK/Assets/ML-Agents/Examples/WallJump/Brains/WallJumpPlayer.asset.meta b/UnitySDK/Assets/ML-Agents/Examples/WallJump/Brains/WallJumpPlayer.asset.meta deleted file mode 100644 index a7ab509031..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/WallJump/Brains/WallJumpPlayer.asset.meta +++ /dev/null @@ -1,8 +0,0 @@ -fileFormatVersion: 2 -guid: 39363e248a8794f05baed8bfa17eb690 -NativeFormatImporter: - externalObjects: {} - mainObjectFileID: 11400000 - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Examples/WallJump/Prefabs/WallJumpArea.prefab b/UnitySDK/Assets/ML-Agents/Examples/WallJump/Prefabs/WallJumpArea.prefab index 8abad1de98..3521ab9154 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/WallJump/Prefabs/WallJumpArea.prefab +++ b/UnitySDK/Assets/ML-Agents/Examples/WallJump/Prefabs/WallJumpArea.prefab @@ -37,6 +37,7 @@ GameObject: - component: {fileID: 4651390251185036} - component: {fileID: 65193133000831296} - component: {fileID: 54678503543725326} + - component: {fileID: 114898893333200490} - component: {fileID: 114925928594762506} - component: 
{fileID: 114092229367912210} m_Layer: 0 @@ -1062,6 +1063,27 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: bb172294dbbcc408286b156a2c4b553c, type: 3} m_Name: m_EditorClassIdentifier: +--- !u!114 &114898893333200490 +MonoBehaviour: + m_ObjectHideFlags: 1 + m_PrefabParentObject: {fileID: 0} + m_PrefabInternal: {fileID: 100100000} + m_GameObject: {fileID: 1195095783991828} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 5d1c4e0b1822b495aa52bc52839ecb30, type: 3} + m_Name: + m_EditorClassIdentifier: + m_BrainParameters: + vectorObservationSize: 74 + numStackedVectorObservations: 6 + vectorActionSize: 03000000030000000300000002000000 + vectorActionDescriptions: [] + vectorActionSpaceType: 0 + m_Model: {fileID: 11400000, guid: fb2ce36eb40b6480e94ea0b5d7573e47, type: 3} + m_InferenceDevice: 0 + m_UseHeuristic: 0 + m_BehaviorName: SmallWallJump --- !u!114 &114925928594762506 MonoBehaviour: m_ObjectHideFlags: 1 @@ -1073,17 +1095,14 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 676fca959b8ee45539773905ca71afa1, type: 3} m_Name: m_EditorClassIdentifier: - brain: {fileID: 0} agentParameters: - agentCameras: [] - agentRenderTextures: [] maxStep: 2000 resetOnDone: 1 onDemandDecision: 0 numberOfActionsBetweenDecisions: 5 - noWallBrain: {fileID: 11400000, guid: 2069d6ef649a549feb29054d6af8a86f, type: 2} - smallWallBrain: {fileID: 11400000, guid: 2069d6ef649a549feb29054d6af8a86f, type: 2} - bigWallBrain: {fileID: 11400000, guid: b5f530c5bf8d64bf8a18df92e283bb9c, type: 2} + noWallBrain: {fileID: 11400000, guid: fb2ce36eb40b6480e94ea0b5d7573e47, type: 3} + smallWallBrain: {fileID: 11400000, guid: fb2ce36eb40b6480e94ea0b5d7573e47, type: 3} + bigWallBrain: {fileID: 11400000, guid: 0468bf44b1efd4992b6bf22cadb50d89, type: 3} ground: {fileID: 1324926338613664} spawnArea: {fileID: 1886170194660384} goal: {fileID: 1982078136115924} diff --git a/UnitySDK/Assets/ML-Agents/Examples/WallJump/Scenes/WallJump.unity 
b/UnitySDK/Assets/ML-Agents/Examples/WallJump/Scenes/WallJump.unity index 2a523435f3..59461a669b 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/WallJump/Scenes/WallJump.unity +++ b/UnitySDK/Assets/ML-Agents/Examples/WallJump/Scenes/WallJump.unity @@ -38,7 +38,7 @@ RenderSettings: m_ReflectionIntensity: 1 m_CustomReflection: {fileID: 0} m_Sun: {fileID: 0} - m_IndirectSpecularColor: {r: 0.44971484, g: 0.49977952, b: 0.57563835, a: 1} + m_IndirectSpecularColor: {r: 0.44971442, g: 0.499779, b: 0.5756377, a: 1} --- !u!157 &3 LightmapSettings: m_ObjectHideFlags: 0 @@ -598,6 +598,10 @@ Prefab: propertyPath: m_Name value: Canvas - Watermark objectReference: {fileID: 0} + - target: {fileID: 1537641056927260, guid: 3ce107b4a79bc4eef83afde434932a68, type: 2} + propertyPath: m_IsActive + value: 1 + objectReference: {fileID: 0} m_RemovedComponents: [] m_ParentPrefab: {fileID: 100100000, guid: 3ce107b4a79bc4eef83afde434932a68, type: 2} m_IsPrefabParent: 0 @@ -1328,12 +1332,6 @@ MonoBehaviour: m_Script: {fileID: 11500000, guid: 50b93afe82bc647b581a706891913e7f, type: 3} m_Name: m_EditorClassIdentifier: - broadcastHub: - broadcastingBrains: - - {fileID: 11400000, guid: b5f530c5bf8d64bf8a18df92e283bb9c, type: 2} - - {fileID: 11400000, guid: 2069d6ef649a549feb29054d6af8a86f, type: 2} - m_BrainsToControl: [] - m_MaxSteps: 0 m_TrainingConfiguration: width: 80 height: 80 diff --git a/UnitySDK/Assets/ML-Agents/Examples/WallJump/Scripts/WallJumpAgent.cs b/UnitySDK/Assets/ML-Agents/Examples/WallJump/Scripts/WallJumpAgent.cs index 16cc2529a7..131e6d780f 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/WallJump/Scripts/WallJumpAgent.cs +++ b/UnitySDK/Assets/ML-Agents/Examples/WallJump/Scripts/WallJumpAgent.cs @@ -3,17 +3,19 @@ using System.Collections; using UnityEngine; using MLAgents; +using Barracuda; + public class WallJumpAgent : Agent { // Depending on this value, the wall will have different height int m_Configuration; // Brain to use when no wall is present - public Brain 
noWallBrain; + public NNModel noWallBrain; // Brain to use when a jumpable wall is present - public Brain smallWallBrain; + public NNModel smallWallBrain; // Brain to use when a wall requiring a block to jump over is present - public Brain bigWallBrain; + public NNModel bigWallBrain; public GameObject ground; public GameObject spawnArea; @@ -245,6 +247,29 @@ public override void AgentAction(float[] vectorAction, string textAction) } } + public override float[] Heuristic() + { + var action = new float[4]; + if (Input.GetKey(KeyCode.D)) + { + action[1] = 2f; + } + if (Input.GetKey(KeyCode.W)) + { + action[0] = 1f; + } + if (Input.GetKey(KeyCode.A)) + { + action[1] = 1f; + } + if (Input.GetKey(KeyCode.S)) + { + action[0] = 2f; + } + action[3] = Input.GetKey(KeyCode.Space) ? 1.0f : 0.0f; + return action; + } + // Detect when the agent hits the goal void OnTriggerStay(Collider col) { @@ -301,7 +326,7 @@ void ConfigureAgent(int config) m_Academy.resetParameters["no_wall_height"], localScale.z); wall.transform.localScale = localScale; - GiveBrain(noWallBrain); + GiveModel("SmallWallJump", noWallBrain); } else if (config == 1) { @@ -310,7 +335,7 @@ void ConfigureAgent(int config) m_Academy.resetParameters["small_wall_height"], localScale.z); wall.transform.localScale = localScale; - GiveBrain(smallWallBrain); + GiveModel("SmallWallJump", smallWallBrain); } else { @@ -323,7 +348,7 @@ void ConfigureAgent(int config) height, localScale.z); wall.transform.localScale = localScale; - GiveBrain(bigWallBrain); + GiveModel("BigWallJump", bigWallBrain); } } } diff --git a/UnitySDK/Assets/ML-Agents/Examples/WallJump/TFModels/BigWallJump.nn b/UnitySDK/Assets/ML-Agents/Examples/WallJump/TFModels/BigWallJump.nn new file mode 100644 index 0000000000..7113fb3f04 Binary files /dev/null and b/UnitySDK/Assets/ML-Agents/Examples/WallJump/TFModels/BigWallJump.nn differ diff --git a/UnitySDK/Assets/ML-Agents/Examples/WallJump/TFModels/BigWallJump.nn.meta 
b/UnitySDK/Assets/ML-Agents/Examples/WallJump/TFModels/BigWallJump.nn.meta new file mode 100644 index 0000000000..2e8ba394de --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Examples/WallJump/TFModels/BigWallJump.nn.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: 0468bf44b1efd4992b6bf22cadb50d89 +ScriptedImporter: + userData: + assetBundleName: + assetBundleVariant: + script: {fileID: 11500000, guid: 19ed1486aa27d4903b34839f37b8f69f, type: 3} diff --git a/UnitySDK/Assets/ML-Agents/Examples/WallJump/TFModels/BigWallJumpLearning.nn b/UnitySDK/Assets/ML-Agents/Examples/WallJump/TFModels/BigWallJumpLearning.nn deleted file mode 100644 index 5dc3c20476..0000000000 Binary files a/UnitySDK/Assets/ML-Agents/Examples/WallJump/TFModels/BigWallJumpLearning.nn and /dev/null differ diff --git a/UnitySDK/Assets/ML-Agents/Examples/WallJump/TFModels/BigWallJumpLearning.nn.meta b/UnitySDK/Assets/ML-Agents/Examples/WallJump/TFModels/BigWallJumpLearning.nn.meta deleted file mode 100644 index c4396c0d60..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/WallJump/TFModels/BigWallJumpLearning.nn.meta +++ /dev/null @@ -1,7 +0,0 @@ -fileFormatVersion: 2 -guid: b036370dc05b9481bbcee7db40d40b5d -ScriptedImporter: - userData: - assetBundleName: - assetBundleVariant: - script: {fileID: 11500000, guid: 19ed1486aa27d4903b34839f37b8f69f, type: 3} diff --git a/UnitySDK/Assets/ML-Agents/Examples/WallJump/TFModels/SmallWallJump.nn b/UnitySDK/Assets/ML-Agents/Examples/WallJump/TFModels/SmallWallJump.nn new file mode 100644 index 0000000000..30c34e1294 Binary files /dev/null and b/UnitySDK/Assets/ML-Agents/Examples/WallJump/TFModels/SmallWallJump.nn differ diff --git a/UnitySDK/Assets/ML-Agents/Examples/WallJump/TFModels/SmallWallJump.nn.meta b/UnitySDK/Assets/ML-Agents/Examples/WallJump/TFModels/SmallWallJump.nn.meta new file mode 100644 index 0000000000..c92b03f555 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Examples/WallJump/TFModels/SmallWallJump.nn.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 
+guid: fb2ce36eb40b6480e94ea0b5d7573e47 +ScriptedImporter: + userData: + assetBundleName: + assetBundleVariant: + script: {fileID: 11500000, guid: 19ed1486aa27d4903b34839f37b8f69f, type: 3} diff --git a/UnitySDK/Assets/ML-Agents/Examples/WallJump/TFModels/SmallWallJumpLearning.nn b/UnitySDK/Assets/ML-Agents/Examples/WallJump/TFModels/SmallWallJumpLearning.nn deleted file mode 100644 index 32e58ea587..0000000000 Binary files a/UnitySDK/Assets/ML-Agents/Examples/WallJump/TFModels/SmallWallJumpLearning.nn and /dev/null differ diff --git a/UnitySDK/Assets/ML-Agents/Examples/WallJump/TFModels/SmallWallJumpLearning.nn.meta b/UnitySDK/Assets/ML-Agents/Examples/WallJump/TFModels/SmallWallJumpLearning.nn.meta deleted file mode 100644 index 7b3414c428..0000000000 --- a/UnitySDK/Assets/ML-Agents/Examples/WallJump/TFModels/SmallWallJumpLearning.nn.meta +++ /dev/null @@ -1,7 +0,0 @@ -fileFormatVersion: 2 -guid: ef4a2c4f314e94d718e08c7c71b3c5f0 -ScriptedImporter: - userData: - assetBundleName: - assetBundleVariant: - script: {fileID: 11500000, guid: 19ed1486aa27d4903b34839f37b8f69f, type: 3} diff --git a/UnitySDK/Assets/ML-Agents/Plugins/Barracuda.Core/Barracuda.md b/UnitySDK/Assets/ML-Agents/Plugins/Barracuda.Core/Barracuda.md index da57a08601..26c5a638cb 100644 --- a/UnitySDK/Assets/ML-Agents/Plugins/Barracuda.Core/Barracuda.md +++ b/UnitySDK/Assets/ML-Agents/Plugins/Barracuda.Core/Barracuda.md @@ -249,7 +249,7 @@ Tan Tanh ``` -P.S. some of these operations are under limited support and not all configurations are properly supported +P.S. some of these operations are under limited support and not all configurations are properly supported P.P.S. 
Python 3.5 or 3.6 is recommended diff --git a/UnitySDK/Assets/ML-Agents/Plugins/Barracuda.Core/LICENSE.md b/UnitySDK/Assets/ML-Agents/Plugins/Barracuda.Core/LICENSE.md index 855b4276d6..389755fbf1 100644 --- a/UnitySDK/Assets/ML-Agents/Plugins/Barracuda.Core/LICENSE.md +++ b/UnitySDK/Assets/ML-Agents/Plugins/Barracuda.Core/LICENSE.md @@ -1,6 +1,6 @@ Barracuda cross-platform Neural Net engine copyright © 2018 Unity Technologies ApS -Licensed under the Unity Companion License for Unity-dependent projects--see [Unity Companion License](http://www.unity3d.com/legal/licenses/Unity_Companion_License). +Licensed under the Unity Companion License for Unity-dependent projects--see [Unity Companion License](http://www.unity3d.com/legal/licenses/Unity_Companion_License). Unless expressly provided otherwise, the Software under this license is made available strictly on an “AS IS” BASIS WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED. Please review the license for details on these and other terms and conditions. diff --git a/UnitySDK/Assets/ML-Agents/Plugins/Barracuda.Core/ReleaseNotes.md b/UnitySDK/Assets/ML-Agents/Plugins/Barracuda.Core/ReleaseNotes.md index 46495fb6fd..a195acffb2 100644 --- a/UnitySDK/Assets/ML-Agents/Plugins/Barracuda.Core/ReleaseNotes.md +++ b/UnitySDK/Assets/ML-Agents/Plugins/Barracuda.Core/ReleaseNotes.md @@ -29,7 +29,7 @@ - TF importer: made detection of actual output node from LSTM/GRU pattern more bullet proof by skipping Const nodes. - TF importer: improved InstanceNormalization handling. - TF importer: fixed SquareDifference pattern. -- TF importer: fixed Conv2DBackpropInput (transpose convolution) import. +- TF importer: fixed Conv2DBackpropInput (transpose convolution) import. - Fixed Conv2D performance regression on some GPUs. - Fixed TextureAsTensorData.Download() to work properly with InterpretDepthAs.Channels. 
- Fixed bug when identity/nop layers would reuse input as an output and later causing premature release of that tensor as part of intermediate data cleanup. @@ -37,7 +37,7 @@ - Fixed double Dispose issue when worker gets garbage collected. ## 0.2.0 -- Version bumped to 0.2.0 as it brings breaking API changes, for details look below. +- Version bumped to 0.2.0 as it brings breaking API changes, for details look below. - Significantly reduced temporary memory allocations by introducing internal allocator support. Now memory is re-used between layer execution as much as possible. - Improved small workload performance on CSharp backend - Added parallel implementation for multiple activation functions on CSharp backend @@ -47,7 +47,7 @@ - Added `Summary()` method to `Worker`. Currently returns allocator information. - Tabs to spaces! Aiming at higher salary (https://stackoverflow.blog/2017/06/15/developers-use-spaces-make-money-use-tabs/). - Renamed worker type enum members: `CSharp` -> `CSharpRef`, `CSharpFast` -> `CSharp`, `Compute` -> `ComputeRef`, `ComputeFast` -> `Compute`. -- Implemented new optimized `ComputePrecompiled` worker. This worker caches Compute kernels and state beforehand to reduce CPU overhead. +- Implemented new optimized `ComputePrecompiled` worker. This worker caches Compute kernels and state beforehand to reduce CPU overhead. - Added `ExecuteAsync()` to `IWorker` interface, it returns `IEnumerator`, which enables you to control how many layers to schedule per frame (one iteration == one layer). - Added `Log` op support on Compute workers. - Optimized activation functions and ScaleBias by accessing tensor as continuous array. Gained ~2.0ms on 4 batch MobileNet (MBP2016). @@ -123,8 +123,8 @@ - Fixed compilation issues on Xbox One. - TexConv2D support was temporary disabled. 
- Barracuda logging now can be configured via static fields of ``Barracuda.D`` class, it allows both disable specific logging levels or just disable stack trace collection (helps with performance when profiling). -- Compute Concat implementation now will fall back to C# implementation instead of throwing exception when unsupported configuration is encountered. -- Fixed several ``ComputeBuffer`` release issues. +- Compute Concat implementation now will fall back to C# implementation instead of throwing exception when unsupported configuration is encountered. +- Fixed several ``ComputeBuffer`` release issues. - Added constructor for ``Tensor`` that allows to pass in data array. - Improved Flatten handling in TensorFlow models. - Added helper func ``ModelLoader.LoadFromStreamingAssets``. diff --git a/UnitySDK/Assets/ML-Agents/Plugins/ProtoBuffer/link.xml b/UnitySDK/Assets/ML-Agents/Plugins/ProtoBuffer/link.xml new file mode 100644 index 0000000000..857dfdd0a4 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Plugins/ProtoBuffer/link.xml @@ -0,0 +1,10 @@ + + + + + diff --git a/UnitySDK/Assets/ML-Agents/Examples/Bouncer/Scenes/BouncerIL.unity.meta b/UnitySDK/Assets/ML-Agents/Plugins/ProtoBuffer/link.xml.meta similarity index 62% rename from UnitySDK/Assets/ML-Agents/Examples/Bouncer/Scenes/BouncerIL.unity.meta rename to UnitySDK/Assets/ML-Agents/Plugins/ProtoBuffer/link.xml.meta index f7e3a1d9e7..872460e078 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/Bouncer/Scenes/BouncerIL.unity.meta +++ b/UnitySDK/Assets/ML-Agents/Plugins/ProtoBuffer/link.xml.meta @@ -1,6 +1,6 @@ fileFormatVersion: 2 -guid: 5ba7f629310d74f3f9200482b6bde8dc -DefaultImporter: +guid: f94355fa6eab94c2d8529747b92ca3e1 +TextScriptImporter: externalObjects: {} userData: assetBundleName: diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Academy.cs b/UnitySDK/Assets/ML-Agents/Scripts/Academy.cs index c96e723e6f..973cb94d3a 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/Academy.cs +++ 
b/UnitySDK/Assets/ML-Agents/Scripts/Academy.cs @@ -1,18 +1,18 @@ using UnityEngine; -using System.IO; -using System.Linq; +using System.Collections.Generic; using UnityEngine.Serialization; #if UNITY_EDITOR using UnityEditor; - #endif +using MLAgents.InferenceBrain; +using Barracuda; /** * Welcome to Unity Machine Learning Agents (ML-Agents). * - * The ML-Agents toolkit contains five entities: Academy, Brain, Agent, Communicator and - * Python API. The academy, and all its brains and connected agents live within - * a learning environment (herin called Environment), while the communicator + * The ML-Agents toolkit contains four entities: Academy, Agent, Communicator and + * Python API. The academy and connected agents live within + * a learning environment (herein called Environment), while the communicator * manages the communication between the learning environment and the Python * API. For more information on each of these entities, in addition to how to * set-up a learning environment and train the behavior of characters in a @@ -75,10 +75,8 @@ public EnvironmentConfiguration( } /// - /// An Academy is where Agent objects go to train their behaviors. More - /// specifically, an academy is a collection of Brain objects and each agent - /// in a scene is attached to one brain (a single brain may be attached to - /// multiple agents). Currently, this class is expected to be extended to + /// An Academy is where Agent objects go to train their behaviors. + /// Currently, this class is expected to be extended to /// implement the desired academy behavior. /// /// @@ -86,29 +84,26 @@ public EnvironmentConfiguration( /// The mode is determined by the presence or absence of a Communicator. In /// the presence of a communicator, the academy is run in training mode where /// the states and observations of each agent are sent through the - /// communicator. 
In the absence of a communciator, the academy is run in - /// inference mode where the agent behavior is determined by the brain - /// attached to it (which may be internal, heuristic or player). + /// communicator. In the absence of a communicator, the academy is run in + /// inference mode where the agent behavior is determined by the Policy + /// attached to it. /// [HelpURL("https://github.com/Unity-Technologies/ml-agents/blob/master/" + "docs/Learning-Environment-Design-Academy.md")] public abstract class Academy : MonoBehaviour { - [SerializeField] - public BroadcastHub broadcastHub = new BroadcastHub(); - - private const string k_KApiVersion = "API-10"; + private const string k_ApiVersion = "API-11"; /// Temporary storage for global gravity value /// Used to restore oringal value when deriving Academy modifies it private Vector3 m_OriginalGravity; /// Temporary storage for global fixedDeltaTime value - /// Used to restore oringal value when deriving Academy modifies it + /// Used to restore original value when deriving Academy modifies it private float m_OriginalFixedDeltaTime; /// Temporary storage for global maximumDeltaTime value - /// Used to restore oringal value when deriving Academy modifies it + /// Used to restore original value when deriving Academy modifies it private float m_OriginalMaximumDeltaTime; // Fields provided in the Inspector @@ -134,38 +129,39 @@ public abstract class Academy : MonoBehaviour /// /// /// Default reset parameters are specified in the academy Editor, and can - /// be modified when training with an external Brain by passinga config + /// be modified when training by passing a config /// dictionary at reset. 
/// [SerializeField] [Tooltip("List of custom parameters that can be changed in the " + "environment when it resets.")] public ResetParameters resetParameters; - public CommunicatorObjects.CustomResetParameters customResetParameters; + public CommunicatorObjects.CustomResetParametersProto customResetParameters; // Fields not provided in the Inspector. - /// Boolean flag indicating whether a communicator is accessible by the - /// environment. This also specifies whether the environment is in - /// Training or Inference mode. - bool m_IsCommunicatorOn; - - /// Keeps track of the id of the last communicator message received. - /// Remains 0 if there are no communicators. Is used to ensure that - /// the same message is not used multiple times. - private ulong m_LastCommunicatorMessageNumber; + /// + /// Returns whether or not the communicator is on. + /// + /// + /// true, if communicator is on, false otherwise. + /// + public bool IsCommunicatorOn + { + get { return Communicator != null; } + } /// If true, the Academy will use inference settings. This field is /// initialized in depending on the presence - /// or absence of a communicator. Furthermore, it can be modified by an - /// external Brain during reset via . + /// or absence of a communicator. Furthermore, it can be modified during + /// training via . bool m_IsInference = true; /// The number of episodes completed by the environment. Incremented /// each time the environment is reset. int m_EpisodeCount; - /// The number of steps completed within the current episide. Incremented + /// The number of steps completed within the current episode. Incremented /// each time a step is taken in the environment. Is reset to 0 during /// . int m_StepCount; @@ -175,25 +171,28 @@ public abstract class Academy : MonoBehaviour int m_TotalStepCount; /// Flag that indicates whether the inference/training mode of the - /// environment was switched by the external Brain. 
This impacts the + /// environment was switched by the training process. This impacts the /// engine settings at the next environment step. bool m_ModeSwitched; - /// Pointer to the batcher currently in use by the Academy. - Batcher m_BrainBatcher; + /// Pointer to the communicator currently in use by the Academy. + public ICommunicator Communicator; + + private bool m_Initialized; + private List m_ModelRunners = new List(); // Flag used to keep track of the first time the Academy is reset. bool m_FirstAcademyReset; - // The Academy uses a series of events to communicate with agents and - // brains to facilitate synchronization. More specifically, it ensure + // The Academy uses a series of events to communicate with agents + // to facilitate synchronization. More specifically, it ensure // that all the agents performs their steps in a consistent order (i.e. no // agent can act based on a decision before another agent has had a chance // to request a decision). - // Signals to all the Brains at each environment step so they can decide - // actions for their agents. - public event System.Action BrainDecideAction; + // Signals to all the Agents at each environment step so they can use + // their Policy to decide on their next action. + public event System.Action DecideAction; // Signals to all the listeners that the academy is being destroyed public event System.Action DestroyAction; @@ -210,29 +209,38 @@ public abstract class Academy : MonoBehaviour public event System.Action AgentResetIfDone; // Signals to all the agents at each environment step so they can send - // their state to their Brain if they have requested a decision. + // their state to their Policy if they have requested a decision. public event System.Action AgentSendState; // Signals to all the agents at each environment step so they can act if // they have requested a decision. public event System.Action AgentAct; - // Sigals to all the agents each time the Academy force resets. 
+ // Signals to all the agents each time the Academy force resets. public event System.Action AgentForceReset; /// - /// Monobehavior function called at the very beginning of environment + /// MonoBehavior function called at the very beginning of environment /// creation. Academy uses this time to initialize internal data /// structures, initialize the environment and check for the existence /// of a communicator. /// void Awake() { - InitializeEnvironment(); + LazyInitialization(); + } + + public void LazyInitialization() + { + if (!m_Initialized) + { + InitializeEnvironment(); + m_Initialized = true; + } } // Used to read Python-provided environment parameters - private int ReadArgs() + private static int ReadArgs() { var args = System.Environment.GetCommandLineArgs(); var inputPort = ""; @@ -257,108 +265,106 @@ private void InitializeEnvironment() m_OriginalMaximumDeltaTime = Time.maximumDeltaTime; InitializeAcademy(); - ICommunicator communicator; - - var exposedBrains = broadcastHub.broadcastingBrains.Where(x => x != null).ToList(); - var controlledBrains = broadcastHub.broadcastingBrains.Where( - x => x != null && x is LearningBrain && broadcastHub.IsControlled(x)); - foreach (var brain1 in controlledBrains) - { - var brain = (LearningBrain)brain1; - brain.SetToControlledExternally(); - } - // Try to launch the communicator by usig the arguments passed at launch + // Try to launch the communicator by using the arguments passed at launch try { - communicator = new RpcCommunicator( - new CommunicatorParameters + Communicator = new RpcCommunicator( + new CommunicatorInitParameters { port = ReadArgs() }); } - // If it fails, we check if there are any external brains in the scene - // If there are : Launch the communicator on the default port - // If there arn't, there is no need for a communicator and it is set - // to null catch { - communicator = null; - if (controlledBrains.ToList().Count > 0) - { - communicator = new RpcCommunicator( - new 
CommunicatorParameters - { - port = 5005 - }); - } - } - - m_BrainBatcher = new Batcher(communicator); - - foreach (var trainingBrain in exposedBrains) - { - trainingBrain.SetBatcher(m_BrainBatcher); +#if UNITY_EDITOR + Communicator = new RpcCommunicator( + new CommunicatorInitParameters + { + port = 5004 + }); +#endif } - if (communicator != null) + if (Communicator != null) { - m_IsCommunicatorOn = true; - - var academyParameters = - new CommunicatorObjects.UnityRLInitializationOutput(); - academyParameters.Name = gameObject.name; - academyParameters.Version = k_KApiVersion; - foreach (var brain in exposedBrains) + // We try to exchange the first message with Python. If this fails, it means + // no Python Process is ready to train the environment. In this case, the + //environment must use Inference. + try { - var bp = brain.brainParameters; - academyParameters.BrainParameters.Add( - bp.ToProto(brain.name, broadcastHub.IsControlled(brain))); + var unityRLInitParameters = Communicator.Initialize( + new CommunicatorInitParameters + { + version = k_ApiVersion, + name = gameObject.name, + environmentResetParameters = new EnvironmentResetParameters + { + resetParameters = resetParameters, + customResetParameters = customResetParameters + } + }); + Random.InitState(unityRLInitParameters.seed); } - academyParameters.EnvironmentParameters = - new CommunicatorObjects.EnvironmentParametersProto(); - foreach (var key in resetParameters.Keys) + catch { - academyParameters.EnvironmentParameters.FloatParameters.Add( - key, resetParameters[key] - ); + Communicator = null; } - var pythonParameters = m_BrainBatcher.SendAcademyParameters(academyParameters); - Random.InitState(pythonParameters.Seed); + if (Communicator != null) + { + Communicator.QuitCommandReceived += OnQuitCommandReceived; + Communicator.ResetCommandReceived += OnResetCommand; + Communicator.RLInputReceived += OnRLInputReceived; + } } // If a communicator is enabled/provided, then we assume we are in // training 
mode. In the absence of a communicator, we assume we are // in inference mode. - m_IsInference = !m_IsCommunicatorOn; - BrainDecideAction += () => { }; + SetIsInference(!IsCommunicatorOn); + + DecideAction += () => { }; DestroyAction += () => { }; - AgentSetStatus += (i) => { }; + AgentSetStatus += i => { }; AgentResetIfDone += () => { }; AgentSendState += () => { }; AgentAct += () => { }; AgentForceReset += () => { }; - - // Configure the environment using the configurations provided by - // the developer in the Editor. - SetIsInference(!m_BrainBatcher.GetIsTraining()); ConfigureEnvironment(); } - private void UpdateResetParameters() + static void OnQuitCommandReceived() { - var newResetParameters = m_BrainBatcher.GetEnvironmentParameters(); - if (newResetParameters != null) +#if UNITY_EDITOR + EditorApplication.isPlaying = false; +#endif + Application.Quit(); + } + + private void OnResetCommand(EnvironmentResetParameters newResetParameters) + { + UpdateResetParameters(newResetParameters); + ForcedFullReset(); + } + + void OnRLInputReceived(UnityRLInputParameters inputParams) + { + m_IsInference = !inputParams.isTraining; + } + + private void UpdateResetParameters(EnvironmentResetParameters newResetParameters) + { + if (newResetParameters.resetParameters != null) { - foreach (var kv in newResetParameters.FloatParameters) + foreach (var kv in newResetParameters.resetParameters) { resetParameters[kv.Key] = kv.Value; } - customResetParameters = newResetParameters.CustomResetParameters; } + customResetParameters = newResetParameters.customResetParameters; } /// @@ -446,7 +452,7 @@ public void SetIsInference(bool isInference) // This signals to the academy that at the next environment step // the engine configurations need updating to the respective mode - // (i.e. training vs inference) configuraiton. + // (i.e. training vs inference) configuration. 
m_ModeSwitched = true; } } @@ -484,17 +490,6 @@ public int GetTotalStepCount() return m_TotalStepCount; } - /// - /// Returns whether or not the communicator is on. - /// - /// - /// true, if communicator is on, false otherwise. - /// - public bool IsCommunicatorOn() - { - return m_IsCommunicatorOn; - } - /// /// Forces the full reset. The done flags are not affected. Is either /// called the first reset at inference and every external reset @@ -503,12 +498,12 @@ public bool IsCommunicatorOn() void ForcedFullReset() { EnvironmentReset(); - AgentForceReset(); + AgentForceReset?.Invoke(); m_FirstAcademyReset = true; } /// - /// Performs a single environment update to the Academy, Brain and Agent + /// Performs a single environment update to the Academy, and Agent /// objects within the environment. /// void EnvironmentStep() @@ -518,48 +513,37 @@ void EnvironmentStep() ConfigureEnvironment(); m_ModeSwitched = false; } - - if ((m_IsCommunicatorOn) && - (m_LastCommunicatorMessageNumber != m_BrainBatcher.GetNumberMessageReceived())) + if (!m_FirstAcademyReset) { - m_LastCommunicatorMessageNumber = m_BrainBatcher.GetNumberMessageReceived(); - if (m_BrainBatcher.GetCommand() == - CommunicatorObjects.CommandProto.Reset) - { - UpdateResetParameters(); - - SetIsInference(!m_BrainBatcher.GetIsTraining()); - - ForcedFullReset(); - } - - if (m_BrainBatcher.GetCommand() == - CommunicatorObjects.CommandProto.Quit) - { -#if UNITY_EDITOR - EditorApplication.isPlaying = false; -#endif - Application.Quit(); - return; - } - } - else if (!m_FirstAcademyReset) - { - UpdateResetParameters(); ForcedFullReset(); } - AgentSetStatus(m_StepCount); + AgentSetStatus?.Invoke(m_StepCount); - AgentResetIfDone(); + using (TimerStack.Instance.Scoped("AgentResetIfDone")) + { + AgentResetIfDone?.Invoke(); + } - AgentSendState(); + using (TimerStack.Instance.Scoped("AgentSendState")) + { + AgentSendState?.Invoke(); + } - BrainDecideAction(); + using (TimerStack.Instance.Scoped("DecideAction")) + { + 
DecideAction?.Invoke(); + } - AcademyStep(); + using (TimerStack.Instance.Scoped("AcademyStep")) + { + AcademyStep(); + } - AgentAct(); + using (TimerStack.Instance.Scoped("AgentAct")) + { + AgentAct?.Invoke(); + } m_StepCount += 1; m_TotalStepCount += 1; @@ -576,13 +560,36 @@ void EnvironmentReset() } /// - /// Monobehavior function that dictates each environment step. + /// MonoBehaviour function that dictates each environment step. /// void FixedUpdate() { EnvironmentStep(); } + /// + /// Creates or retrieves an existing ModelRunner that uses the same + /// NNModel and the InferenceDevice as provided. + /// + /// The NNModel the ModelRunner must use + /// The brainParameters used to create + /// the ModelRunner + /// The inference device (CPU or GPU) + /// the ModelRunner will use + /// The ModelRunner compatible with the input settings + public ModelRunner GetOrCreateModelRunner( + NNModel model, BrainParameters brainParameters, InferenceDevice inferenceDevice) + { + var modelRunner = m_ModelRunners.Find(x => x.HasModel(model, inferenceDevice)); + if (modelRunner == null) + { + modelRunner = new ModelRunner( + model, brainParameters, inferenceDevice); + m_ModelRunners.Add(modelRunner); + } + return modelRunner; + } + /// /// Cleanup function /// @@ -593,7 +600,16 @@ protected virtual void OnDestroy() Time.maximumDeltaTime = m_OriginalMaximumDeltaTime; // Signal to listeners that the academy is being destroyed now - DestroyAction(); + DestroyAction?.Invoke(); + + foreach (var mr in m_ModelRunners) + { + mr.Dispose(); + } + + // TODO - Pass worker ID or some other identifier, + // so that multiple envs won't overwrite each others stats. 
+ TimerStack.Instance.SaveJsonTimers(); } } } diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Agent.cs b/UnitySDK/Assets/ML-Agents/Scripts/Agent.cs index 9c670224cb..f8abf6dfd6 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/Agent.cs +++ b/UnitySDK/Assets/ML-Agents/Scripts/Agent.cs @@ -1,7 +1,8 @@ using System.Collections.Generic; -using Google.Protobuf; -using MLAgents.CommunicatorObjects; using UnityEngine; +using Barracuda; +using MLAgents.Sensor; + namespace MLAgents @@ -25,9 +26,9 @@ public struct AgentInfo public List stackedVectorObservation; /// - /// Most recent agent camera (i.e. texture) observation. + /// Most recent compressed observations. /// - public List visualObservations; + public List compressedObservations; /// /// Most recent text observation. @@ -82,58 +83,9 @@ public struct AgentInfo /// /// User-customizable object for sending structured output from Unity to Python in response /// to an action in addition to a scalar reward. + /// TODO(cgoy): All references to protobuf objects should be removed. /// - public CustomObservation customObservation; - - /// - /// Converts a AgentInfo to a protobuffer generated AgentInfoProto - /// - /// The protobuf version of the AgentInfo. - public AgentInfoProto ToProto() - { - var agentInfoProto = new AgentInfoProto - { - StackedVectorObservation = { stackedVectorObservation }, - StoredVectorActions = { storedVectorActions }, - StoredTextActions = storedTextActions, - TextObservation = textObservation, - Reward = reward, - MaxStepReached = maxStepReached, - Done = done, - Id = id, - CustomObservation = customObservation - }; - if (memories != null) - { - agentInfoProto.Memories.Add(memories); - } - - if (actionMasks != null) - { - agentInfoProto.ActionMask.AddRange(actionMasks); - } - - foreach (var obs in visualObservations) - { - agentInfoProto.VisualObservations.Add( - ByteString.CopyFrom(obs.EncodeToPNG()) - ); - } - return agentInfoProto; - } - - /// - /// Remove the visual observations from memory. 
Call at each timestep - /// to avoid memory leaks. - /// - public void ClearVisualObs() - { - foreach (var obs in visualObservations) - { - Object.Destroy(obs); - } - visualObservations.Clear(); - } + public CommunicatorObjects.CustomObservationProto customObservation; } /// @@ -146,7 +98,8 @@ public struct AgentAction public string textActions; public List memories; public float value; - public CustomAction customAction; + /// TODO(cgoy): All references to protobuf objects should be removed. + public CommunicatorObjects.CustomActionProto customAction; } /// @@ -157,19 +110,6 @@ public struct AgentAction [System.Serializable] public class AgentParameters { - /// - /// The list of the Camera GameObjects the agent uses for visual - /// observations. - /// - public List agentCameras = new List(); - - /// - /// The list of the RenderTextures the agent uses for visual - /// observations. - /// - public List agentRenderTextures = new List(); - - /// /// The maximum number of steps the agent takes before being done. /// @@ -212,17 +152,15 @@ public class AgentParameters /// environment. Observations are determined by the cameras attached /// to the agent in addition to the vector observations implemented by the /// user in . On the other hand, actions - /// are determined by decisions produced by a linked Brain. Currently, this + /// are determined by decisions produced by a Policy. Currently, this /// class is expected to be extended to implement the desired agent behavior. /// /// /// Simply speaking, an agent roams through an environment and at each step /// of the environment extracts its current observation, sends them to its - /// linked brain and in return receives an action from its brain. In practice, + /// policy and in return receives an action. In practice, /// however, an agent need not send its observation at every step since very - /// little may have changed between sucessive steps. 
Currently, how often an - /// agent updates its brain with a fresh observation is determined by the - /// Academy. + /// little may have changed between successive steps. /// /// At any step, an agent may be considered . /// This could occur due to a variety of reasons: @@ -247,8 +185,8 @@ public class AgentParameters /// set to a value larger than the academy max steps value, then the academy /// value takes precedence (since the agent max step will never be reached). /// - /// Lastly, note that at any step the brain linked to the agent is allowed to - /// change programmatically with . + /// Lastly, note that at any step the policy to the agent is allowed to + /// change model with . /// /// Implementation-wise, it is required that this class is extended and the /// virtual methods overridden. For sample implementations of agent behavior, @@ -257,15 +195,11 @@ public class AgentParameters [HelpURL("https://github.com/Unity-Technologies/ml-agents/blob/master/" + "docs/Learning-Environment-Design-Agents.md")] [System.Serializable] + [RequireComponent(typeof(BehaviorParameters))] public abstract class Agent : MonoBehaviour { - /// - /// The Brain attached to this agent. A brain can be attached either - /// directly from the Editor through AgentEditor or - /// programmatically through . It is OK for an agent - /// to not have a brain, as long as no decision is requested. - /// - [HideInInspector] public Brain brain; + private IPolicy m_Brain; + private BehaviorParameters m_PolicyFactory; /// /// Agent parameters specified within the Editor via AgentEditor. @@ -274,6 +208,11 @@ public abstract class Agent : MonoBehaviour /// Current Agent information (message sent to Brain). AgentInfo m_Info; + public AgentInfo Info + { + get { return m_Info; } + set { m_Info = value; } + } /// Current Agent action (message sent from Brain). 
AgentAction m_Action; @@ -329,12 +268,15 @@ public abstract class Agent : MonoBehaviour /// private DemonstrationRecorder m_Recorder; + public List m_Sensors; + /// Monobehavior function that is called when the attached GameObject /// becomes enabled or active. void OnEnable() { m_Id = gameObject.GetInstanceID(); var academy = FindObjectOfType(); + academy.LazyInitialization(); OnEnableHelper(academy); m_Recorder = GetComponent(); @@ -346,6 +288,7 @@ void OnEnableHelper(Academy academy) { m_Info = new AgentInfo(); m_Action = new AgentAction(); + m_Sensors = new List(); if (academy == null) { @@ -356,23 +299,14 @@ void OnEnableHelper(Academy academy) academy.AgentSetStatus += SetStatus; academy.AgentResetIfDone += ResetIfDone; academy.AgentSendState += SendInfo; + academy.DecideAction += DecideAction; academy.AgentAct += AgentStep; academy.AgentForceReset += _AgentReset; - - if (brain != null) - { - ResetData(); - } - else - { - Debug.Log( - string.Format( - "The Agent component attached to the " + - "GameObject {0} was initialized without a brain.", - gameObject.name)); - } - + m_PolicyFactory = GetComponent(); + m_Brain = m_PolicyFactory.GeneratePolicy(Heuristic); + ResetData(); InitializeAgent(); + InitializeSensors(); } /// Monobehavior function that is called when the attached GameObject @@ -385,28 +319,33 @@ void OnDisable() academy.AgentSetStatus -= SetStatus; academy.AgentResetIfDone -= ResetIfDone; academy.AgentSendState -= SendInfo; + academy.DecideAction -= DecideAction; academy.AgentAct -= AgentStep; academy.AgentForceReset -= ForceReset; } + m_Brain?.Dispose(); } /// - /// Updates the Brain for the agent. Any brain currently assigned to the - /// agent will be replaced with the provided one. + /// Updates the Model for the agent. Any model currently assigned to the + /// agent will be replaced with the provided one. If the arguments are + /// identical to the current parameters of the agent, the model will + /// remain unchanged. 
/// - /// - /// The agent unsubscribes from its current brain (if it has one) and - /// subscribes to the provided brain. This enables contextual brains, that - /// is, updating the behaviour (hence brain) of the agent depending on - /// the context of the game. For example, we may utilize one (wandering) - /// brain when an agent is randomly exploring an open world, but switch - /// to another (fighting) brain when it comes into contact with an enemy. - /// - /// New brain to subscribe this agent to - public void GiveBrain(Brain givenBrain) + /// The identifier of the behavior. This + /// will categorize the agent when training. + /// + /// The model to use for inference. + /// Define on what device the model + /// will be run. + public void GiveModel( + string behaviorName, + NNModel model, + InferenceDevice inferenceDevice = InferenceDevice.CPU) { - brain = givenBrain; - ResetData(); + m_PolicyFactory.GiveModel(behaviorName, model, inferenceDevice); + m_Brain?.Dispose(); + m_Brain = m_PolicyFactory.GeneratePolicy(Heuristic); } /// @@ -523,12 +462,7 @@ public bool IsDone() /// at the end of an episode. void ResetData() { - if (brain == null) - { - return; - } - - var param = brain.brainParameters; + var param = m_PolicyFactory.brainParameters; m_ActionMasker = new ActionMasker(param); // If we haven't initialized vectorActions, initialize to 0. This should only // happen during the creation of the Agent. 
In subsequent episodes, vectorAction @@ -556,12 +490,12 @@ void ResetData() new List(param.vectorObservationSize); m_Info.stackedVectorObservation = new List(param.vectorObservationSize - * brain.brainParameters.numStackedVectorObservations); + * param.numStackedVectorObservations); m_Info.stackedVectorObservation.AddRange( new float[param.vectorObservationSize * param.numStackedVectorObservations]); - m_Info.visualObservations = new List(); + m_Info.compressedObservations = new List(); m_Info.customObservation = null; } @@ -578,12 +512,53 @@ public virtual void InitializeAgent() { } + + /// + /// When the Agent uses Heuristics, it will call this method every time it + /// needs an action. This can be used for debugging or controlling the agent + /// with keyboard. + /// + /// A float array corresponding to the next action of the Agent + /// + public virtual float[] Heuristic() + { + throw new UnityAgentsException(string.Format( + "The Heuristic method was not implemented for the Agent on the " + + "{0} GameObject.", + gameObject.name)); + } + + /// + /// Set up the list of ISensors on the Agent. By default, this will select any + /// SensorBase's attached to the Agent. + /// + public void InitializeSensors() + { + var attachedSensorComponents = GetComponents(); + m_Sensors.Capacity += attachedSensorComponents.Length; + foreach (var component in attachedSensorComponents) + { + m_Sensors.Add(component.CreateSensor()); + } + + // Sort the sensors by name to ensure determinism + m_Sensors.Sort((x, y) => x.GetName().CompareTo(y.GetName())); + +#if DEBUG + // Make sure the names are actually unique + for (var i = 0; i < m_Sensors.Count - 1; i++) + { + Debug.Assert(!m_Sensors[i].GetName().Equals(m_Sensors[i + 1].GetName()), "Sensor names must be unique."); + } +#endif + } + /// /// Sends the Agent info to the linked Brain. 
/// void SendInfoToBrain() { - if (brain == null) + if (m_Brain == null) { return; } @@ -592,19 +567,23 @@ void SendInfoToBrain() m_Info.storedVectorActions = m_Action.vectorActions; m_Info.storedTextActions = m_Action.textActions; m_Info.vectorObservation.Clear(); + m_Info.compressedObservations.Clear(); m_ActionMasker.ResetMask(); - CollectObservations(); + using (TimerStack.Instance.Scoped("CollectObservations")) + { + CollectObservations(); + } m_Info.actionMasks = m_ActionMasker.GetMask(); - var param = brain.brainParameters; + var param = m_PolicyFactory.brainParameters; if (m_Info.vectorObservation.Count != param.vectorObservationSize) { throw new UnityAgentsException(string.Format( - "Vector Observation size mismatch between continuous " + - "agent {0} and brain {1}. " + - "Was Expecting {2} but received {3}. ", - gameObject.name, brain.name, - brain.brainParameters.vectorObservationSize, + "Vector Observation size mismatch in continuous " + + "agent {0}. " + + "Was Expecting {1} but received {2}. 
", + gameObject.name, + param.vectorObservationSize, m_Info.vectorObservation.Count)); } @@ -612,54 +591,51 @@ void SendInfoToBrain() Utilities.ReplaceRange(m_Info.stackedVectorObservation, m_Info.vectorObservation, m_Info.stackedVectorObservation.Count - m_Info.vectorObservation.Count); - m_Info.visualObservations.Clear(); - var visualObservationCount = agentParameters.agentCameras.Count + agentParameters.agentRenderTextures.Count; - if (param.cameraResolutions.Length > visualObservationCount) - { - throw new UnityAgentsException(string.Format( - "Not enough cameras/renderTextures for agent {0} : Brain {1} expecting at " + - "least {2} cameras/renderTextures but only {3} were present.", - gameObject.name, brain.name, - brain.brainParameters.cameraResolutions.Length, - visualObservationCount)); - } - - //First add all cameras - for (var i = 0; i < agentParameters.agentCameras.Count; i++) - { - var obsTexture = ObservationToTexture( - agentParameters.agentCameras[i], - param.cameraResolutions[i].width, - param.cameraResolutions[i].height); - m_Info.visualObservations.Add(obsTexture); - } - - //Then add all renderTextures - var camCount = agentParameters.agentCameras.Count; - for (var i = 0; i < agentParameters.agentRenderTextures.Count; i++) - { - var obsTexture = ObservationToTexture( - agentParameters.agentRenderTextures[i], - param.cameraResolutions[camCount + i].width, - param.cameraResolutions[camCount + i].height); - m_Info.visualObservations.Add(obsTexture); - } - m_Info.reward = m_Reward; m_Info.done = m_Done; m_Info.maxStepReached = m_MaxStepReached; m_Info.id = m_Id; - brain.SendState(this, m_Info); + m_Brain.RequestDecision(this); if (m_Recorder != null && m_Recorder.record && Application.isEditor) { + // This is a bit of a hack - if we're in inference mode, compressed observations won't be generated + // But we need these to be generated for the recorder. So generate them here. 
+ if (m_Info.compressedObservations.Count == 0) + { + GenerateSensorData(); + } + m_Recorder.WriteExperience(m_Info); } m_Info.textObservation = ""; } + /// + /// Generate data for each sensor and store it on the Agent's AgentInfo. + /// NOTE: At the moment, this is only called during training or when using a DemonstrationRecorder; + /// during inference the sensors are used to write directly to the Tensor data. This will likely change in the + /// future to be controlled by the type of brain being used. + /// + public void GenerateSensorData() + { + // Generate data for all sensors + // TODO add bool argument indicating when to compress? For now, we always will compress. + for (var i = 0; i < m_Sensors.Count; i++) + { + var sensor = m_Sensors[i]; + var compressedObs = new CompressedObservation + { + Data = sensor.GetCompressedObservation(), + Shape = sensor.GetFloatObservationShape(), + CompressionType = sensor.GetCompressionType() + }; + m_Info.compressedObservations.Add(compressedObs); + } + } + /// /// Collects the (vector, visual, text) observations of the agent. /// The agent observation describes the current environment from the @@ -866,7 +842,7 @@ public virtual void AgentAction(float[] vectorAction, string textAction) /// A custom action, defined by the user as custom protobuf message. Useful if the action is hard to encode /// as either a flat vector or a single string. /// - public virtual void AgentAction(float[] vectorAction, string textAction, CustomAction customAction) + public virtual void AgentAction(float[] vectorAction, string textAction, CommunicatorObjects.CustomActionProto customAction) { // We fall back to not using the custom action if the subclassed Agent doesn't override this method. AgentAction(vectorAction, textAction); @@ -912,6 +888,11 @@ void _AgentReset() AgentReset(); } + public void UpdateAgentAction(AgentAction action) + { + m_Action = action; + } + /// /// Updates the vector action. 
/// @@ -940,24 +921,6 @@ public List GetMemoriesAction() return m_Action.memories; } - /// - /// Updates the text action. - /// - /// Text actions. - public void UpdateTextAction(string textActions) - { - m_Action.textActions = textActions; - } - - /// - /// Updates the custom action. - /// - /// Custom action. - public void UpdateCustomAction(CustomAction customAction) - { - m_Action.customAction = customAction; - } - /// /// Updates the value of the agent. /// @@ -986,7 +949,7 @@ protected float ScaleAction(float rawAction, float min, float max) } /// - /// Sets the status of the agent. Will request decisions or actions according + /// Sets the status of the agent. Will request decisions or actions according /// to the Academy's stepcount. /// /// Number of current steps in episode @@ -1063,7 +1026,7 @@ void AgentStep() AgentOnDone(); } - if ((m_RequestAction) && (brain != null)) + if ((m_RequestAction) && (m_Brain != null)) { m_RequestAction = false; AgentAction(m_Action.vectorActions, m_Action.textActions, m_Action.customAction); @@ -1098,82 +1061,16 @@ void MakeRequests(int academyStepCounter) } } - /// - /// Converts a camera and corresponding resolution to a 2D texture. - /// - /// The 2D texture. - /// Camera. - /// Width of resulting 2D texture. - /// Height of resulting 2D texture. - /// Texture2D to render to. 
- public static Texture2D ObservationToTexture(Camera obsCamera, int width, int height) + void DecideAction() { - var texture2D = new Texture2D(width, height, TextureFormat.RGB24, false); - var oldRec = obsCamera.rect; - obsCamera.rect = new Rect(0f, 0f, 1f, 1f); - var depth = 24; - var format = RenderTextureFormat.Default; - var readWrite = RenderTextureReadWrite.Default; - - var tempRt = - RenderTexture.GetTemporary(width, height, depth, format, readWrite); - - var prevActiveRt = RenderTexture.active; - var prevCameraRt = obsCamera.targetTexture; - - // render to offscreen texture (readonly from CPU side) - RenderTexture.active = tempRt; - obsCamera.targetTexture = tempRt; - - obsCamera.Render(); - - texture2D.ReadPixels(new Rect(0, 0, texture2D.width, texture2D.height), 0, 0); - - obsCamera.targetTexture = prevCameraRt; - obsCamera.rect = oldRec; - RenderTexture.active = prevActiveRt; - RenderTexture.ReleaseTemporary(tempRt); - return texture2D; - } - - /// - /// Converts a RenderTexture and correspinding resolution to a 2D texture. - /// - /// The 2D texture. - /// RenderTexture. - /// Width of resulting 2D texture. - /// Height of resulting 2D texture. - /// Texture2D to render to. 
- public static Texture2D ObservationToTexture(RenderTexture obsTexture, int width, int height) - { - var texture2D = new Texture2D(width, height, TextureFormat.RGB24, false); - - if (width != texture2D.width || height != texture2D.height) - { - texture2D.Resize(width, height); - } - - if (width != obsTexture.width || height != obsTexture.height) - { - throw new UnityAgentsException(string.Format( - "RenderTexture {0} : width/height is {1}/{2} brain is expecting {3}/{4}.", - obsTexture.name, obsTexture.width, obsTexture.height, width, height)); - } - - var prevActiveRt = RenderTexture.active; - RenderTexture.active = obsTexture; - - texture2D.ReadPixels(new Rect(0, 0, texture2D.width, texture2D.height), 0, 0); - texture2D.Apply(); - RenderTexture.active = prevActiveRt; - return texture2D; + m_Brain?.DecideAction(); } /// /// Sets the custom observation for the agent for this episode. /// /// New value of the agent's custom observation. - public void SetCustomObservation(CustomObservation customObservation) + public void SetCustomObservation(CommunicatorObjects.CustomObservationProto customObservation) { m_Info.customObservation = customObservation; } diff --git a/UnitySDK/Assets/ML-Agents/Scripts/BCTeacherHelper.cs b/UnitySDK/Assets/ML-Agents/Scripts/BCTeacherHelper.cs deleted file mode 100644 index 00b7d32304..0000000000 --- a/UnitySDK/Assets/ML-Agents/Scripts/BCTeacherHelper.cs +++ /dev/null @@ -1,59 +0,0 @@ -using UnityEngine; - -namespace MLAgents -{ - /// - /// Behavioral Cloning Helper script. Attach to teacher agent to enable - /// resetting the experience buffer, as well as toggling session recording. 
- /// - public class BcTeacherHelper : MonoBehaviour - { - bool m_RecordExperiences; - bool m_ResetBuffer; - Agent m_MyAgent; - float m_BufferResetTime; - - public KeyCode recordKey = KeyCode.R; - public KeyCode resetKey = KeyCode.C; - - // Use this for initialization - void Start() - { - m_RecordExperiences = true; - m_ResetBuffer = false; - m_MyAgent = GetComponent(); - m_BufferResetTime = Time.time; - } - - // Update is called once per frame - void Update() - { - if (Input.GetKeyDown(recordKey)) - { - m_RecordExperiences = !m_RecordExperiences; - } - - if (Input.GetKeyDown(resetKey)) - { - m_ResetBuffer = true; - m_BufferResetTime = Time.time; - } - else - { - m_ResetBuffer = false; - } - - Monitor.Log("Recording experiences " + recordKey, m_RecordExperiences.ToString()); - var timeSinceBufferReset = Time.time - m_BufferResetTime; - Monitor.Log("Seconds since buffer reset " + resetKey, - Mathf.FloorToInt(timeSinceBufferReset).ToString()); - } - - void FixedUpdate() - { - // Convert both bools into single comma separated string. Python makes - // assumption that this structure is preserved. - m_MyAgent.SetTextObs(m_RecordExperiences + "," + m_ResetBuffer); - } - } -} diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Batcher.cs b/UnitySDK/Assets/ML-Agents/Scripts/Batcher.cs deleted file mode 100644 index 5712df4f98..0000000000 --- a/UnitySDK/Assets/ML-Agents/Scripts/Batcher.cs +++ /dev/null @@ -1,289 +0,0 @@ -using System.Collections.Generic; -using System.Linq; -using System; -using UnityEngine; - -namespace MLAgents -{ - /// - /// The batcher is an RL specific class that makes sure that the information each object in - /// Unity (Academy and Brains) wants to send to External is appropriately batched together - /// and sent only when necessary. 
- /// - /// The Batcher will only send a Message to the Communicator when either : - /// 1 - The academy is done - /// 2 - At least one brain has data to send - /// - /// At each step, the batcher will keep track of the brains that queried the batcher for that - /// step. The batcher can only send the batched data when all the Brains have queried the - /// Batcher. - /// - public class Batcher - { - /// The default number of agents in the scene - private const int k_NumAgents = 32; - - /// Keeps track of which brains have data to send on the current step - Dictionary m_HasData = - new Dictionary(); - - /// Keeps track of which brains queried the batcher on the current step - Dictionary m_HasQueried = - new Dictionary(); - - /// Keeps track of the agents of each brain on the current step - Dictionary> m_CurrentAgents = - new Dictionary>(); - - /// The Communicator of the batcher, sends a message at most once per step - ICommunicator m_Communicator; - - /// The current UnityRLOutput to be sent when all the brains queried the batcher - CommunicatorObjects.UnityRLOutput m_CurrentUnityRlOutput = - new CommunicatorObjects.UnityRLOutput(); - - /// Keeps track of last CommandProto sent by External - CommunicatorObjects.CommandProto m_Command; - - /// Keeps track of last EnvironmentParametersProto sent by External - CommunicatorObjects.EnvironmentParametersProto m_EnvironmentParameters; - - /// Keeps track of last training mode sent by External - bool m_IsTraining; - - /// Keeps track of the number of messages received - private ulong m_MessagesReceived; - - /// - /// Initializes a new instance of the Batcher class. - /// - /// The communicator to be used by the batcher. - public Batcher(ICommunicator communicator) - { - m_Communicator = communicator; - } - - /// - /// Sends the academy parameters through the Communicator. - /// Is used by the academy to send the AcademyParameters to the communicator. - /// - /// The External Initialization Parameters received. 
- /// The Unity Initialization Parameters to be sent. - public CommunicatorObjects.UnityRLInitializationInput SendAcademyParameters( - CommunicatorObjects.UnityRLInitializationOutput academyParameters) - { - CommunicatorObjects.UnityInput input; - var initializationInput = new CommunicatorObjects.UnityInput(); - try - { - initializationInput = m_Communicator.Initialize( - new CommunicatorObjects.UnityOutput - { - RlInitializationOutput = academyParameters - }, - out input); - } - catch - { - var exceptionMessage = "The Communicator was unable to connect. Please make sure the External " + - "process is ready to accept communication with Unity."; - - // Check for common error condition and add details to the exception message. - var httpProxy = Environment.GetEnvironmentVariable("HTTP_PROXY"); - var httpsProxy = Environment.GetEnvironmentVariable("HTTPS_PROXY"); - if (httpProxy != null || httpsProxy != null) - { - exceptionMessage += " Try removing HTTP_PROXY and HTTPS_PROXY from the" + - "environment variables and try again."; - } - throw new UnityAgentsException(exceptionMessage); - } - - var firstRlInput = input.RlInput; - m_Command = firstRlInput.Command; - m_EnvironmentParameters = firstRlInput.EnvironmentParameters; - m_IsTraining = firstRlInput.IsTraining; - return initializationInput.RlInitializationInput; - } - - /// - /// Gets the command. Is used by the academy to get reset or quit signals. - /// - /// The current command. - public CommunicatorObjects.CommandProto GetCommand() - { - return m_Command; - } - - /// - /// Gets the number of messages received so far. Can be used to check for new messages. - /// - /// The number of messages received since start of the simulation - public ulong GetNumberMessageReceived() - { - return m_MessagesReceived; - } - - /// - /// Gets the environment parameters. Is used by the academy to update - /// the environment parameters. - /// - /// The environment parameters. 
- public CommunicatorObjects.EnvironmentParametersProto GetEnvironmentParameters() - { - return m_EnvironmentParameters; - } - - /// - /// Gets the last training_mode flag External sent - /// - /// true, if training mode is requested, false otherwise. - public bool GetIsTraining() - { - return m_IsTraining; - } - - /// - /// Adds the brain to the list of brains which will be sending information to External. - /// - /// Brain key. - public void SubscribeBrain(string brainKey) - { - m_HasQueried[brainKey] = false; - m_HasData[brainKey] = false; - m_CurrentAgents[brainKey] = new List(k_NumAgents); - m_CurrentUnityRlOutput.AgentInfos.Add( - brainKey, - new CommunicatorObjects.UnityRLOutput.Types.ListAgentInfoProto()); - } - - /// - /// Sends the brain info. If at least one brain has an agent in need of - /// a decision or if the academy is done, the data is sent via - /// Communicator. Else, a new step is realized. The data can only be - /// sent once all the brains that subscribed to the batcher have tried - /// to send information. - /// - /// Brain key. - /// Agent info. - public void SendBrainInfo( - string brainKey, Dictionary agentInfo) - { - // If no communicator is initialized, the Batcher will not transmit - // BrainInfo - if (m_Communicator == null) - { - return; - } - - // The brain tried called GiveBrainInfo, update m_hasQueried - m_HasQueried[brainKey] = true; - // Populate the currentAgents dictionary - m_CurrentAgents[brainKey].Clear(); - foreach (var agent in agentInfo.Keys) - { - m_CurrentAgents[brainKey].Add(agent); - } - - // If at least one agent has data to send, then append data to - // the message and update hasSentState - if (m_CurrentAgents[brainKey].Count > 0) - { - foreach (var agent in m_CurrentAgents[brainKey]) - { - var agentInfoProto = agentInfo[agent].ToProto(); - m_CurrentUnityRlOutput.AgentInfos[brainKey].Value.Add(agentInfoProto); - // Avoid visual obs memory leak. This should be called AFTER we are done with the visual obs. - // e.g. 
after recording them to demo and using them for inference. - agentInfo[agent].ClearVisualObs(); - } - - m_HasData[brainKey] = true; - } - - // If any agent needs to send data, then the whole message - // must be sent - if (m_HasQueried.Values.All(x => x)) - { - if (m_HasData.Values.Any(x => x)) - { - SendBatchedMessageHelper(); - } - - // The message was just sent so we must reset hasSentState and - // triedSendState - foreach (var k in m_CurrentAgents.Keys) - { - m_HasData[k] = false; - m_HasQueried[k] = false; - } - } - } - - /// - /// Helper method that sends the current UnityRLOutput, receives the next UnityInput and - /// Applies the appropriate AgentAction to the agents. - /// - void SendBatchedMessageHelper() - { - var input = m_Communicator.Exchange( - new CommunicatorObjects.UnityOutput - { - RlOutput = m_CurrentUnityRlOutput - }); - m_MessagesReceived += 1; - - foreach (var k in m_CurrentUnityRlOutput.AgentInfos.Keys) - { - m_CurrentUnityRlOutput.AgentInfos[k].Value.Clear(); - } - - if (input == null) - { - m_Command = CommunicatorObjects.CommandProto.Quit; - return; - } - - var rlInput = input.RlInput; - - if (rlInput == null) - { - m_Command = CommunicatorObjects.CommandProto.Quit; - return; - } - - m_Command = rlInput.Command; - m_EnvironmentParameters = rlInput.EnvironmentParameters; - m_IsTraining = rlInput.IsTraining; - - if (rlInput.AgentActions == null) - { - return; - } - - foreach (var brainName in rlInput.AgentActions.Keys) - { - if (!m_CurrentAgents[brainName].Any()) - { - continue; - } - - if (!rlInput.AgentActions[brainName].Value.Any()) - { - continue; - } - - for (var i = 0; i < m_CurrentAgents[brainName].Count; i++) - { - var agent = m_CurrentAgents[brainName][i]; - var action = rlInput.AgentActions[brainName].Value[i]; - agent.UpdateVectorAction(action.VectorActions.ToArray()); - agent.UpdateMemoriesAction(action.Memories.ToList()); - agent.UpdateTextAction(action.TextActions); - agent.UpdateValueAction(action.Value); - 
agent.UpdateCustomAction(action.CustomAction); - } - } - } - } -} diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Batcher.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/Batcher.cs.meta deleted file mode 100644 index e7a87c640c..0000000000 --- a/UnitySDK/Assets/ML-Agents/Scripts/Batcher.cs.meta +++ /dev/null @@ -1,13 +0,0 @@ -fileFormatVersion: 2 -guid: 4243d5dc0ad5746cba578575182f8c17 -timeCreated: 1523045876 -licenseType: Free -MonoImporter: - externalObjects: {} - serializedVersion: 2 - defaultReferences: [] - executionOrder: 0 - icon: {instanceID: 0} - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Brain.cs b/UnitySDK/Assets/ML-Agents/Scripts/Brain.cs deleted file mode 100644 index a853ad96c0..0000000000 --- a/UnitySDK/Assets/ML-Agents/Scripts/Brain.cs +++ /dev/null @@ -1,111 +0,0 @@ -using System.Collections.Generic; -using UnityEngine; - -namespace MLAgents -{ - /// - /// Brain receive data from Agents through calls to SendState. The brain then updates the - /// actions of the agents at each FixedUpdate. - /// The Brain encapsulates the decision making process. Every Agent must be assigned a Brain, - /// but you can use the same Brain with more than one Agent. You can also create several - /// Brains, attach each of the Brain to one or more than one Agent. - /// Brain assets has several important properties that you can set using the Inspector window. - /// These properties must be appropriate for the Agents using the Brain. For example, the - /// Vector Observation Space Size property must match the length of the feature - /// vector created by an Agent exactly. - /// - public abstract class Brain : ScriptableObject - { - [SerializeField] public BrainParameters brainParameters; - - protected Dictionary m_AgentInfos = - new Dictionary(1024); - - protected Batcher m_BrainBatcher; - - [System.NonSerialized] - private bool m_IsInitialized; - - /// - /// Sets the Batcher of the Brain. 
The brain will call the batcher at every step and give - /// it the agent's data using SendBrainInfo at each DecideAction call. - /// - /// The Batcher the brain will use for the current session - public void SetBatcher(Batcher batcher) - { - if (batcher == null) - { - m_BrainBatcher = null; - } - else - { - m_BrainBatcher = batcher; - m_BrainBatcher.SubscribeBrain(name); - } - LazyInitialize(); - } - - /// - /// Adds the data of an agent to the current batch so it will be processed in DecideAction. - /// - /// - /// - public void SendState(Agent agent, AgentInfo info) - { - LazyInitialize(); - m_AgentInfos.Add(agent, info); - } - - /// - /// If the Brain is not initialized, it subscribes to the Academy's DecideAction Event and - /// calls the Initialize method to be implemented by child classes. - /// - private void LazyInitialize() - { - if (!m_IsInitialized) - { - var academy = FindObjectOfType(); - if (academy) - { - academy.BrainDecideAction += BrainDecideAction; - academy.DestroyAction += Shutdown; - Initialize(); - m_IsInitialized = true; - } - } - } - - /// - /// Called by the Academy when it shuts down. This ensures that the Brain cleans up properly - /// after scene changes. - /// - private void Shutdown() - { - if (m_IsInitialized) - { - m_AgentInfos.Clear(); - - m_IsInitialized = false; - } - } - - /// - /// Calls the DecideAction method that the concrete brain implements. - /// - private void BrainDecideAction() - { - m_BrainBatcher?.SendBrainInfo(name, m_AgentInfos); - DecideAction(); - } - - /// - /// Is called only once at the begening of the training or inference session. - /// - protected abstract void Initialize(); - - /// - /// Is called once per Environment Step after the Brain has been initialized. 
- /// - protected abstract void DecideAction(); - } -} diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Brain.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/Brain.cs.meta deleted file mode 100755 index eaf6f0706a..0000000000 --- a/UnitySDK/Assets/ML-Agents/Scripts/Brain.cs.meta +++ /dev/null @@ -1,12 +0,0 @@ -fileFormatVersion: 2 -guid: c676a8ddf5a5f4f64b35e9ed5028679d -timeCreated: 1503211687 -licenseType: Free -MonoImporter: - serializedVersion: 2 - defaultReferences: [] - executionOrder: 0 - icon: {instanceID: 0} - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Scripts/BrainParameters.cs b/UnitySDK/Assets/ML-Agents/Scripts/BrainParameters.cs deleted file mode 100644 index 52e1748943..0000000000 --- a/UnitySDK/Assets/ML-Agents/Scripts/BrainParameters.cs +++ /dev/null @@ -1,154 +0,0 @@ -using System; -using UnityEngine; -using System.Linq; - -namespace MLAgents -{ - public enum SpaceType - { - Discrete, - Continuous - }; - - /// - /// The resolution of a camera used by an agent. - /// The width defines the number of pixels on the horizontal axis. - /// The height defines the number of pixels on the verical axis. - /// blackAndWhite defines whether or not the image is grayscale. - /// - [Serializable] - public struct Resolution - { - /// The width of the observation in pixels - public int width; - - /// The height of the observation in pixels - public int height; - - /// - /// If true, the image will be in black and white. - /// If false, it will be in colors RGB - /// - public bool blackAndWhite; - } - - /// - /// Holds information about the Brain. It defines what are the inputs and outputs of the - /// decision process. 
- /// - [Serializable] - public class BrainParameters - { - /// - /// If continuous : The length of the float vector that represents - /// the state - /// If discrete : The number of possible values the state can take - /// - public int vectorObservationSize = 1; - - [Range(1, 50)] public int numStackedVectorObservations = 1; - - /// - /// If continuous : The length of the float vector that represents - /// the action - /// If discrete : The number of possible values the action can take*/ - /// - public int[] vectorActionSize = new[] {1}; - - /// The list of observation resolutions for the brain - public Resolution[] cameraResolutions; - - /// The list of strings describing what the actions correpond to */ - public string[] vectorActionDescriptions; - - /// Defines if the action is discrete or continuous - public SpaceType vectorActionSpaceType = SpaceType.Discrete; - - /// - /// Converts a Brain into to a Protobuff BrainInfoProto so it can be sent - /// - /// The BrainInfoProto generated. - /// The name of the brain. - /// Whether or not the Brain is training. - public CommunicatorObjects.BrainParametersProto - ToProto(string name, bool isTraining) - { - var brainParametersProto = new CommunicatorObjects.BrainParametersProto - { - VectorObservationSize = vectorObservationSize, - NumStackedVectorObservations = numStackedVectorObservations, - VectorActionSize = {vectorActionSize}, - VectorActionSpaceType = - (CommunicatorObjects.SpaceTypeProto)vectorActionSpaceType, - BrainName = name, - IsTraining = isTraining - }; - brainParametersProto.VectorActionDescriptions.AddRange(vectorActionDescriptions); - foreach (var res in cameraResolutions) - { - brainParametersProto.CameraResolutions.Add( - new CommunicatorObjects.ResolutionProto - { - Width = res.width, - Height = res.height, - GrayScale = res.blackAndWhite - }); - } - - return brainParametersProto; - } - - public BrainParameters() - { - } - - /// - /// Converts Resolution protobuf array to C# Resolution array. 
- /// - private static Resolution[] ResolutionProtoToNative( - CommunicatorObjects.ResolutionProto[] resolutionProtos) - { - var localCameraResolutions = new Resolution[resolutionProtos.Length]; - for (var i = 0; i < resolutionProtos.Length; i++) - { - localCameraResolutions[i] = new Resolution - { - height = resolutionProtos[i].Height, - width = resolutionProtos[i].Width, - blackAndWhite = resolutionProtos[i].GrayScale - }; - } - - return localCameraResolutions; - } - - public BrainParameters(CommunicatorObjects.BrainParametersProto brainParametersProto) - { - vectorObservationSize = brainParametersProto.VectorObservationSize; - cameraResolutions = ResolutionProtoToNative( - brainParametersProto.CameraResolutions.ToArray() - ); - numStackedVectorObservations = brainParametersProto.NumStackedVectorObservations; - vectorActionSize = brainParametersProto.VectorActionSize.ToArray(); - vectorActionDescriptions = brainParametersProto.VectorActionDescriptions.ToArray(); - vectorActionSpaceType = (SpaceType)brainParametersProto.VectorActionSpaceType; - } - - /// - /// Deep clones the BrainParameter object - /// - /// A new BrainParameter object with the same values as the original. 
- public BrainParameters Clone() - { - return new BrainParameters() - { - vectorObservationSize = vectorObservationSize, - numStackedVectorObservations = numStackedVectorObservations, - vectorActionSize = (int[])vectorActionSize.Clone(), - cameraResolutions = (Resolution[])cameraResolutions.Clone(), - vectorActionDescriptions = (string[])vectorActionDescriptions.Clone(), - vectorActionSpaceType = vectorActionSpaceType - }; - } - } -} diff --git a/UnitySDK/Assets/ML-Agents/Scripts/BroadcastHub.cs b/UnitySDK/Assets/ML-Agents/Scripts/BroadcastHub.cs deleted file mode 100644 index 25e4b8dbe2..0000000000 --- a/UnitySDK/Assets/ML-Agents/Scripts/BroadcastHub.cs +++ /dev/null @@ -1,70 +0,0 @@ -using System.Collections.Generic; -using UnityEngine; -using UnityEngine.Serialization; - -namespace MLAgents -{ - /// - /// BroadcastHub holds reference to brains and keeps track wether or not the brain be - /// remotely controlled. - /// - [System.Serializable] - public class BroadcastHub - { - [SerializeField] - public List broadcastingBrains = new List(); - [FormerlySerializedAs("_brainsToControl")] - [SerializeField] - private List m_BrainsToControl = new List(); - - /// - /// The number of Brains inside the BroadcastingHub. - /// - public int Count - { - get { return broadcastingBrains.Count; } - } - - /// - /// Checks that a given Brain is set to be remote controlled. - /// - /// The Brain that is beeing checked - /// true if the Brain is set to Controlled and false otherwise. Will return - /// false if the Brain is not present in the Hub. - public bool IsControlled(Brain brain) - { - return m_BrainsToControl.Contains(brain); - } - - /// - /// Sets a brain to controlled. - /// - /// The Brain that is being set to controlled - /// if true, the Brain will be set to remote controlled. Otherwise - /// the brain will be set to broadcast only. 
- public void SetControlled(Brain brain, bool controlled) - { - if (broadcastingBrains.Contains(brain)) - { - if (controlled && !m_BrainsToControl.Contains(brain)) - { - m_BrainsToControl.Add(brain); - } - - if (!controlled && m_BrainsToControl.Contains(brain)) - { - m_BrainsToControl.Remove(brain); - } - } - } - - /// - /// Removes all the Brains of the BroadcastHub - /// - public void Clear() - { - broadcastingBrains.Clear(); - m_BrainsToControl.Clear(); - } - } -} diff --git a/UnitySDK/Assets/ML-Agents/Scripts/BroadcastHub.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/BroadcastHub.cs.meta deleted file mode 100644 index 70bcf9b672..0000000000 --- a/UnitySDK/Assets/ML-Agents/Scripts/BroadcastHub.cs.meta +++ /dev/null @@ -1,3 +0,0 @@ -fileFormatVersion: 2 -guid: e43fd511c9f147e487d80e0bab3f6c6b -timeCreated: 1536851538 \ No newline at end of file diff --git a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/CommandProto.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/CommandProto.cs.meta deleted file mode 100644 index 6443336bad..0000000000 --- a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/CommandProto.cs.meta +++ /dev/null @@ -1,11 +0,0 @@ -fileFormatVersion: 2 -guid: 6b2ff9fe2c38b4e79aba78908cc5492c -MonoImporter: - externalObjects: {} - serializedVersion: 2 - defaultReferences: [] - executionOrder: 0 - icon: {instanceID: 0} - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/DemonstrationMetaProto.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/DemonstrationMetaProto.cs.meta deleted file mode 100644 index f62ed06414..0000000000 --- a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/DemonstrationMetaProto.cs.meta +++ /dev/null @@ -1,11 +0,0 @@ -fileFormatVersion: 2 -guid: f7abfeda342414e059423ef90ede4c30 -MonoImporter: - externalObjects: {} - serializedVersion: 2 - defaultReferences: [] - executionOrder: 0 - icon: {instanceID: 0} - userData: - 
assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/EngineConfigurationProto.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/EngineConfigurationProto.cs.meta deleted file mode 100644 index 613b83d39d..0000000000 --- a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/EngineConfigurationProto.cs.meta +++ /dev/null @@ -1,11 +0,0 @@ -fileFormatVersion: 2 -guid: 2cebeb1263d7846b4b3c7c6e5d5e193f -MonoImporter: - externalObjects: {} - serializedVersion: 2 - defaultReferences: [] - executionOrder: 0 - icon: {instanceID: 0} - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/EnvironmentParametersProto.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/EnvironmentParametersProto.cs.meta deleted file mode 100644 index ba521fb57f..0000000000 --- a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/EnvironmentParametersProto.cs.meta +++ /dev/null @@ -1,11 +0,0 @@ -fileFormatVersion: 2 -guid: be8c5f75bdcff41488a8e85748541100 -MonoImporter: - externalObjects: {} - serializedVersion: 2 - defaultReferences: [] - executionOrder: 0 - icon: {instanceID: 0} - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/ResolutionProto.cs b/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/ResolutionProto.cs deleted file mode 100644 index da862cf256..0000000000 --- a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/ResolutionProto.cs +++ /dev/null @@ -1,231 +0,0 @@ -// -// Generated by the protocol buffer compiler. DO NOT EDIT! 
-// source: mlagents/envs/communicator_objects/resolution_proto.proto -// -#pragma warning disable 1591, 0612, 3021 -#region Designer generated code - -using pb = global::Google.Protobuf; -using pbc = global::Google.Protobuf.Collections; -using pbr = global::Google.Protobuf.Reflection; -using scg = global::System.Collections.Generic; -namespace MLAgents.CommunicatorObjects { - - /// Holder for reflection information generated from mlagents/envs/communicator_objects/resolution_proto.proto - public static partial class ResolutionProtoReflection { - - #region Descriptor - /// File descriptor for mlagents/envs/communicator_objects/resolution_proto.proto - public static pbr::FileDescriptor Descriptor { - get { return descriptor; } - } - private static pbr::FileDescriptor descriptor; - - static ResolutionProtoReflection() { - byte[] descriptorData = global::System.Convert.FromBase64String( - string.Concat( - "CjltbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL3Jlc29sdXRp", - "b25fcHJvdG8ucHJvdG8SFGNvbW11bmljYXRvcl9vYmplY3RzIkQKD1Jlc29s", - "dXRpb25Qcm90bxINCgV3aWR0aBgBIAEoBRIOCgZoZWlnaHQYAiABKAUSEgoK", - "Z3JheV9zY2FsZRgDIAEoCEIfqgIcTUxBZ2VudHMuQ29tbXVuaWNhdG9yT2Jq", - "ZWN0c2IGcHJvdG8z")); - descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, - new pbr::FileDescriptor[] { }, - new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] { - new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.ResolutionProto), global::MLAgents.CommunicatorObjects.ResolutionProto.Parser, new[]{ "Width", "Height", "GrayScale" }, null, null, null) - })); - } - #endregion - - } - #region Messages - public sealed partial class ResolutionProto : pb::IMessage { - private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new ResolutionProto()); - private pb::UnknownFieldSet _unknownFields; - [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public static pb::MessageParser Parser { get { return _parser; } } - - 
[global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public static pbr::MessageDescriptor Descriptor { - get { return global::MLAgents.CommunicatorObjects.ResolutionProtoReflection.Descriptor.MessageTypes[0]; } - } - - [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - pbr::MessageDescriptor pb::IMessage.Descriptor { - get { return Descriptor; } - } - - [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public ResolutionProto() { - OnConstruction(); - } - - partial void OnConstruction(); - - [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public ResolutionProto(ResolutionProto other) : this() { - width_ = other.width_; - height_ = other.height_; - grayScale_ = other.grayScale_; - _unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields); - } - - [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public ResolutionProto Clone() { - return new ResolutionProto(this); - } - - /// Field number for the "width" field. - public const int WidthFieldNumber = 1; - private int width_; - [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public int Width { - get { return width_; } - set { - width_ = value; - } - } - - /// Field number for the "height" field. - public const int HeightFieldNumber = 2; - private int height_; - [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public int Height { - get { return height_; } - set { - height_ = value; - } - } - - /// Field number for the "gray_scale" field. 
- public const int GrayScaleFieldNumber = 3; - private bool grayScale_; - [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public bool GrayScale { - get { return grayScale_; } - set { - grayScale_ = value; - } - } - - [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public override bool Equals(object other) { - return Equals(other as ResolutionProto); - } - - [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public bool Equals(ResolutionProto other) { - if (ReferenceEquals(other, null)) { - return false; - } - if (ReferenceEquals(other, this)) { - return true; - } - if (Width != other.Width) return false; - if (Height != other.Height) return false; - if (GrayScale != other.GrayScale) return false; - return Equals(_unknownFields, other._unknownFields); - } - - [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public override int GetHashCode() { - int hash = 1; - if (Width != 0) hash ^= Width.GetHashCode(); - if (Height != 0) hash ^= Height.GetHashCode(); - if (GrayScale != false) hash ^= GrayScale.GetHashCode(); - if (_unknownFields != null) { - hash ^= _unknownFields.GetHashCode(); - } - return hash; - } - - [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public override string ToString() { - return pb::JsonFormatter.ToDiagnosticString(this); - } - - [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public void WriteTo(pb::CodedOutputStream output) { - if (Width != 0) { - output.WriteRawTag(8); - output.WriteInt32(Width); - } - if (Height != 0) { - output.WriteRawTag(16); - output.WriteInt32(Height); - } - if (GrayScale != false) { - output.WriteRawTag(24); - output.WriteBool(GrayScale); - } - if (_unknownFields != null) { - _unknownFields.WriteTo(output); - } - } - - [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public int CalculateSize() { - int size = 0; - if (Width != 0) { - size += 1 + pb::CodedOutputStream.ComputeInt32Size(Width); - } - if (Height != 0) { - size += 1 + 
pb::CodedOutputStream.ComputeInt32Size(Height); - } - if (GrayScale != false) { - size += 1 + 1; - } - if (_unknownFields != null) { - size += _unknownFields.CalculateSize(); - } - return size; - } - - [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public void MergeFrom(ResolutionProto other) { - if (other == null) { - return; - } - if (other.Width != 0) { - Width = other.Width; - } - if (other.Height != 0) { - Height = other.Height; - } - if (other.GrayScale != false) { - GrayScale = other.GrayScale; - } - _unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields); - } - - [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public void MergeFrom(pb::CodedInputStream input) { - uint tag; - while ((tag = input.ReadTag()) != 0) { - switch(tag) { - default: - _unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input); - break; - case 8: { - Width = input.ReadInt32(); - break; - } - case 16: { - Height = input.ReadInt32(); - break; - } - case 24: { - GrayScale = input.ReadBool(); - break; - } - } - } - } - - } - - #endregion - -} - -#endregion Designer generated code diff --git a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/ResolutionProto.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/ResolutionProto.cs.meta deleted file mode 100644 index b019d860af..0000000000 --- a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/ResolutionProto.cs.meta +++ /dev/null @@ -1,11 +0,0 @@ -fileFormatVersion: 2 -guid: eae234f817240444a9d18b3d7366f260 -MonoImporter: - externalObjects: {} - serializedVersion: 2 - defaultReferences: [] - executionOrder: 0 - icon: {instanceID: 0} - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/SpaceTypeProto.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/SpaceTypeProto.cs.meta deleted file mode 100644 index dcf5954210..0000000000 --- 
a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/SpaceTypeProto.cs.meta +++ /dev/null @@ -1,11 +0,0 @@ -fileFormatVersion: 2 -guid: 3e61637749b07412284363ff304da763 -MonoImporter: - externalObjects: {} - serializedVersion: 2 - defaultReferences: [] - executionOrder: 0 - icon: {instanceID: 0} - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Decision.cs b/UnitySDK/Assets/ML-Agents/Scripts/Decision.cs deleted file mode 100644 index e2a27072c5..0000000000 --- a/UnitySDK/Assets/ML-Agents/Scripts/Decision.cs +++ /dev/null @@ -1,55 +0,0 @@ -using System.Collections.Generic; -using UnityEngine; - -namespace MLAgents -{ - /// - /// Interface for implementing the behavior of an Agent that uses a Heuristic - /// Brain. The behavior of an Agent in this case is fully decided using the - /// implementation of these methods and no training or inference takes place. - /// Currently, the Heuristic Brain does not support text observations and actions. - /// - public abstract class Decision : ScriptableObject - { - public BrainParameters brainParameters; - - /// - /// Defines the decision-making logic of the agent. Given the information - /// about the agent, returns a vector of actions. - /// - /// Vector action vector. - /// The vector observations of the agent. - /// The cameras the agent uses for visual observations. - /// The reward the agent received at the previous step. - /// Whether or not the agent is done. - /// - /// The memories stored from the previous step with - /// - /// - public abstract float[] Decide( - List - vectorObs, - List visualObs, - float reward, - bool done, - List memory); - - /// - /// Defines the logic for creating the memory vector for the Agent. - /// - /// The vector of memories the agent will use at the next step. - /// The vector observations of the agent. - /// The cameras the agent uses for visual observations. - /// The reward the agent received at the previous step. 
- /// Whether or not the agent is done. - /// - /// The memories stored from the previous call to this method. - /// - public abstract List MakeMemory( - List vectorObs, - List visualObs, - float reward, - bool done, - List memory); - } -} diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Decision.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/Decision.cs.meta deleted file mode 100755 index 07dd93ea8d..0000000000 --- a/UnitySDK/Assets/ML-Agents/Scripts/Decision.cs.meta +++ /dev/null @@ -1,12 +0,0 @@ -fileFormatVersion: 2 -guid: 13e74744309fd4571b76e46fafc6d37f -timeCreated: 1503182472 -licenseType: Free -MonoImporter: - serializedVersion: 2 - defaultReferences: [] - executionOrder: 0 - icon: {instanceID: 0} - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Demonstration.cs b/UnitySDK/Assets/ML-Agents/Scripts/Demonstration.cs index cf8a948696..f6722451fb 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/Demonstration.cs +++ b/UnitySDK/Assets/ML-Agents/Scripts/Demonstration.cs @@ -1,5 +1,4 @@ using System; -using MLAgents.CommunicatorObjects; using UnityEngine; namespace MLAgents @@ -34,43 +33,5 @@ public class DemonstrationMetaData public float meanReward; public string demonstrationName; public const int ApiVersion = 1; - - /// - /// Constructor for initializing metadata to default values. - /// - public DemonstrationMetaData() - { - } - - /// - /// Initialize metadata values based on proto object. - /// - public DemonstrationMetaData(DemonstrationMetaProto demoProto) - { - numberEpisodes = demoProto.NumberEpisodes; - numberExperiences = demoProto.NumberSteps; - meanReward = demoProto.MeanReward; - demonstrationName = demoProto.DemonstrationName; - if (demoProto.ApiVersion != ApiVersion) - { - throw new Exception("API versions of demonstration are incompatible."); - } - } - - /// - /// Convert metadata object to proto object. 
- /// - public DemonstrationMetaProto ToProto() - { - var demoProto = new DemonstrationMetaProto - { - ApiVersion = ApiVersion, - MeanReward = meanReward, - NumberSteps = numberExperiences, - NumberEpisodes = numberEpisodes, - DemonstrationName = demonstrationName - }; - return demoProto; - } } } diff --git a/UnitySDK/Assets/ML-Agents/Scripts/DemonstrationRecorder.cs b/UnitySDK/Assets/ML-Agents/Scripts/DemonstrationRecorder.cs index 715f20852d..01c0d77d9d 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/DemonstrationRecorder.cs +++ b/UnitySDK/Assets/ML-Agents/Scripts/DemonstrationRecorder.cs @@ -42,8 +42,8 @@ private void InitializeDemoStore() demonstrationName = SanitizeName(demonstrationName, MaxNameLength); m_DemoStore.Initialize( demonstrationName, - m_RecordingAgent.brain.brainParameters, - m_RecordingAgent.brain.name); + GetComponent().brainParameters, + GetComponent().behaviorName); Monitor.Log("Recording Demonstration of Agent: ", m_RecordingAgent.name); } diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Grpc.meta b/UnitySDK/Assets/ML-Agents/Scripts/Grpc.meta new file mode 100644 index 0000000000..f9d48bfc0f --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Grpc.meta @@ -0,0 +1,3 @@ +fileFormatVersion: 2 +guid: 418327e202c7464bb6649d025df1b539 +timeCreated: 1569444731 \ No newline at end of file diff --git a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects.meta b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects.meta similarity index 100% rename from UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects.meta rename to UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects.meta diff --git a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/AgentActionProto.cs b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/AgentAction.cs similarity index 87% rename from UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/AgentActionProto.cs rename to UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/AgentAction.cs index 
7cdbe34fe9..33a893aa9d 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/AgentActionProto.cs +++ b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/AgentAction.cs @@ -1,6 +1,6 @@ // // Generated by the protocol buffer compiler. DO NOT EDIT! -// source: mlagents/envs/communicator_objects/agent_action_proto.proto +// source: mlagents/envs/communicator_objects/agent_action.proto // #pragma warning disable 1591, 0612, 3021 #region Designer generated code @@ -11,27 +11,27 @@ using scg = global::System.Collections.Generic; namespace MLAgents.CommunicatorObjects { - /// Holder for reflection information generated from mlagents/envs/communicator_objects/agent_action_proto.proto - public static partial class AgentActionProtoReflection { + /// Holder for reflection information generated from mlagents/envs/communicator_objects/agent_action.proto + public static partial class AgentActionReflection { #region Descriptor - /// File descriptor for mlagents/envs/communicator_objects/agent_action_proto.proto + /// File descriptor for mlagents/envs/communicator_objects/agent_action.proto public static pbr::FileDescriptor Descriptor { get { return descriptor; } } private static pbr::FileDescriptor descriptor; - static AgentActionProtoReflection() { + static AgentActionReflection() { byte[] descriptorData = global::System.Convert.FromBase64String( string.Concat( - "CjttbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL2FnZW50X2Fj", - "dGlvbl9wcm90by5wcm90bxIUY29tbXVuaWNhdG9yX29iamVjdHMaNm1sYWdl", - "bnRzL2VudnMvY29tbXVuaWNhdG9yX29iamVjdHMvY3VzdG9tX2FjdGlvbi5w", - "cm90byKcAQoQQWdlbnRBY3Rpb25Qcm90bxIWCg52ZWN0b3JfYWN0aW9ucxgB", - "IAMoAhIUCgx0ZXh0X2FjdGlvbnMYAiABKAkSEAoIbWVtb3JpZXMYAyADKAIS", - "DQoFdmFsdWUYBCABKAISOQoNY3VzdG9tX2FjdGlvbhgFIAEoCzIiLmNvbW11", - "bmljYXRvcl9vYmplY3RzLkN1c3RvbUFjdGlvbkIfqgIcTUxBZ2VudHMuQ29t", - "bXVuaWNhdG9yT2JqZWN0c2IGcHJvdG8z")); + "CjVtbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL2FnZW50X2Fj", + 
"dGlvbi5wcm90bxIUY29tbXVuaWNhdG9yX29iamVjdHMaNm1sYWdlbnRzL2Vu", + "dnMvY29tbXVuaWNhdG9yX29iamVjdHMvY3VzdG9tX2FjdGlvbi5wcm90byKh", + "AQoQQWdlbnRBY3Rpb25Qcm90bxIWCg52ZWN0b3JfYWN0aW9ucxgBIAMoAhIU", + "Cgx0ZXh0X2FjdGlvbnMYAiABKAkSEAoIbWVtb3JpZXMYAyADKAISDQoFdmFs", + "dWUYBCABKAISPgoNY3VzdG9tX2FjdGlvbhgFIAEoCzInLmNvbW11bmljYXRv", + "cl9vYmplY3RzLkN1c3RvbUFjdGlvblByb3RvQh+qAhxNTEFnZW50cy5Db21t", + "dW5pY2F0b3JPYmplY3RzYgZwcm90bzM=")); descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, new pbr::FileDescriptor[] { global::MLAgents.CommunicatorObjects.CustomActionReflection.Descriptor, }, new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] { @@ -50,7 +50,7 @@ public sealed partial class AgentActionProto : pb::IMessage { [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pbr::MessageDescriptor Descriptor { - get { return global::MLAgents.CommunicatorObjects.AgentActionProtoReflection.Descriptor.MessageTypes[0]; } + get { return global::MLAgents.CommunicatorObjects.AgentActionReflection.Descriptor.MessageTypes[0]; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] @@ -71,7 +71,7 @@ public AgentActionProto(AgentActionProto other) : this() { textActions_ = other.textActions_; memories_ = other.memories_.Clone(); value_ = other.value_; - customAction_ = other.customAction_ != null ? other.customAction_.Clone() : null; + CustomAction = other.customAction_ != null ? other.CustomAction.Clone() : null; _unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields); } @@ -124,9 +124,9 @@ public float Value { /// Field number for the "custom_action" field. 
public const int CustomActionFieldNumber = 5; - private global::MLAgents.CommunicatorObjects.CustomAction customAction_; + private global::MLAgents.CommunicatorObjects.CustomActionProto customAction_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public global::MLAgents.CommunicatorObjects.CustomAction CustomAction { + public global::MLAgents.CommunicatorObjects.CustomActionProto CustomAction { get { return customAction_; } set { customAction_ = value; @@ -229,7 +229,7 @@ public void MergeFrom(AgentActionProto other) { } if (other.customAction_ != null) { if (customAction_ == null) { - CustomAction = new global::MLAgents.CommunicatorObjects.CustomAction(); + customAction_ = new global::MLAgents.CommunicatorObjects.CustomActionProto(); } CustomAction.MergeFrom(other.CustomAction); } @@ -264,9 +264,9 @@ public void MergeFrom(pb::CodedInputStream input) { } case 42: { if (customAction_ == null) { - CustomAction = new global::MLAgents.CommunicatorObjects.CustomAction(); + customAction_ = new global::MLAgents.CommunicatorObjects.CustomActionProto(); } - input.ReadMessage(CustomAction); + input.ReadMessage(customAction_); break; } } diff --git a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/BrainParametersProto.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/AgentAction.cs.meta similarity index 83% rename from UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/BrainParametersProto.cs.meta rename to UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/AgentAction.cs.meta index e3ee7bcab6..f47d94375b 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/BrainParametersProto.cs.meta +++ b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/AgentAction.cs.meta @@ -1,5 +1,5 @@ fileFormatVersion: 2 -guid: 7b41acc4d406e4a3c94df3399b2a6471 +guid: b1fa94db54b734224927bb4b322227cd MonoImporter: externalObjects: {} serializedVersion: 2 diff --git 
a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/AgentInfoProto.cs b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/AgentInfo.cs similarity index 81% rename from UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/AgentInfoProto.cs rename to UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/AgentInfo.cs index aa98fc2afe..dfe9158fd9 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/AgentInfoProto.cs +++ b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/AgentInfo.cs @@ -1,6 +1,6 @@ // // Generated by the protocol buffer compiler. DO NOT EDIT! -// source: mlagents/envs/communicator_objects/agent_info_proto.proto +// source: mlagents/envs/communicator_objects/agent_info.proto // #pragma warning disable 1591, 0612, 3021 #region Designer generated code @@ -11,35 +11,38 @@ using scg = global::System.Collections.Generic; namespace MLAgents.CommunicatorObjects { - /// Holder for reflection information generated from mlagents/envs/communicator_objects/agent_info_proto.proto - public static partial class AgentInfoProtoReflection { + /// Holder for reflection information generated from mlagents/envs/communicator_objects/agent_info.proto + public static partial class AgentInfoReflection { #region Descriptor - /// File descriptor for mlagents/envs/communicator_objects/agent_info_proto.proto + /// File descriptor for mlagents/envs/communicator_objects/agent_info.proto public static pbr::FileDescriptor Descriptor { get { return descriptor; } } private static pbr::FileDescriptor descriptor; - static AgentInfoProtoReflection() { + static AgentInfoReflection() { byte[] descriptorData = global::System.Convert.FromBase64String( string.Concat( - "CjltbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL2FnZW50X2lu", - "Zm9fcHJvdG8ucHJvdG8SFGNvbW11bmljYXRvcl9vYmplY3RzGjttbGFnZW50", - "cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL2N1c3RvbV9vYnNlcnZhdGlv", - "bi5wcm90byLXAgoOQWdlbnRJbmZvUHJvdG8SIgoac3RhY2tlZF92ZWN0b3Jf", - 
"b2JzZXJ2YXRpb24YASADKAISGwoTdmlzdWFsX29ic2VydmF0aW9ucxgCIAMo", - "DBIYChB0ZXh0X29ic2VydmF0aW9uGAMgASgJEh0KFXN0b3JlZF92ZWN0b3Jf", - "YWN0aW9ucxgEIAMoAhIbChNzdG9yZWRfdGV4dF9hY3Rpb25zGAUgASgJEhAK", - "CG1lbW9yaWVzGAYgAygCEg4KBnJld2FyZBgHIAEoAhIMCgRkb25lGAggASgI", - "EhgKEG1heF9zdGVwX3JlYWNoZWQYCSABKAgSCgoCaWQYCiABKAUSEwoLYWN0", - "aW9uX21hc2sYCyADKAgSQwoSY3VzdG9tX29ic2VydmF0aW9uGAwgASgLMicu", - "Y29tbXVuaWNhdG9yX29iamVjdHMuQ3VzdG9tT2JzZXJ2YXRpb25CH6oCHE1M", - "QWdlbnRzLkNvbW11bmljYXRvck9iamVjdHNiBnByb3RvMw==")); + "CjNtbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL2FnZW50X2lu", + "Zm8ucHJvdG8SFGNvbW11bmljYXRvcl9vYmplY3RzGj9tbGFnZW50cy9lbnZz", + "L2NvbW11bmljYXRvcl9vYmplY3RzL2NvbXByZXNzZWRfb2JzZXJ2YXRpb24u", + "cHJvdG8aO21sYWdlbnRzL2VudnMvY29tbXVuaWNhdG9yX29iamVjdHMvY3Vz", + "dG9tX29ic2VydmF0aW9uLnByb3RvIpgDCg5BZ2VudEluZm9Qcm90bxIiChpz", + "dGFja2VkX3ZlY3Rvcl9vYnNlcnZhdGlvbhgBIAMoAhIYChB0ZXh0X29ic2Vy", + "dmF0aW9uGAMgASgJEh0KFXN0b3JlZF92ZWN0b3JfYWN0aW9ucxgEIAMoAhIb", + "ChNzdG9yZWRfdGV4dF9hY3Rpb25zGAUgASgJEhAKCG1lbW9yaWVzGAYgAygC", + "Eg4KBnJld2FyZBgHIAEoAhIMCgRkb25lGAggASgIEhgKEG1heF9zdGVwX3Jl", + "YWNoZWQYCSABKAgSCgoCaWQYCiABKAUSEwoLYWN0aW9uX21hc2sYCyADKAgS", + "SAoSY3VzdG9tX29ic2VydmF0aW9uGAwgASgLMiwuY29tbXVuaWNhdG9yX29i", + "amVjdHMuQ3VzdG9tT2JzZXJ2YXRpb25Qcm90bxJRChdjb21wcmVzc2VkX29i", + "c2VydmF0aW9ucxgNIAMoCzIwLmNvbW11bmljYXRvcl9vYmplY3RzLkNvbXBy", + "ZXNzZWRPYnNlcnZhdGlvblByb3RvSgQIAhADQh+qAhxNTEFnZW50cy5Db21t", + "dW5pY2F0b3JPYmplY3RzYgZwcm90bzM=")); descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, - new pbr::FileDescriptor[] { global::MLAgents.CommunicatorObjects.CustomObservationReflection.Descriptor, }, + new pbr::FileDescriptor[] { global::MLAgents.CommunicatorObjects.CompressedObservationReflection.Descriptor, global::MLAgents.CommunicatorObjects.CustomObservationReflection.Descriptor, }, new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] { - new 
pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.AgentInfoProto), global::MLAgents.CommunicatorObjects.AgentInfoProto.Parser, new[]{ "StackedVectorObservation", "VisualObservations", "TextObservation", "StoredVectorActions", "StoredTextActions", "Memories", "Reward", "Done", "MaxStepReached", "Id", "ActionMask", "CustomObservation" }, null, null, null) + new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.AgentInfoProto), global::MLAgents.CommunicatorObjects.AgentInfoProto.Parser, new[]{ "StackedVectorObservation", "TextObservation", "StoredVectorActions", "StoredTextActions", "Memories", "Reward", "Done", "MaxStepReached", "Id", "ActionMask", "CustomObservation", "CompressedObservations" }, null, null, null) })); } #endregion @@ -54,7 +57,7 @@ public sealed partial class AgentInfoProto : pb::IMessage { [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pbr::MessageDescriptor Descriptor { - get { return global::MLAgents.CommunicatorObjects.AgentInfoProtoReflection.Descriptor.MessageTypes[0]; } + get { return global::MLAgents.CommunicatorObjects.AgentInfoReflection.Descriptor.MessageTypes[0]; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] @@ -72,7 +75,6 @@ public AgentInfoProto() { [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public AgentInfoProto(AgentInfoProto other) : this() { stackedVectorObservation_ = other.stackedVectorObservation_.Clone(); - visualObservations_ = other.visualObservations_.Clone(); textObservation_ = other.textObservation_; storedVectorActions_ = other.storedVectorActions_.Clone(); storedTextActions_ = other.storedTextActions_; @@ -82,7 +84,8 @@ public AgentInfoProto(AgentInfoProto other) : this() { maxStepReached_ = other.maxStepReached_; id_ = other.id_; actionMask_ = other.actionMask_.Clone(); - customObservation_ = other.customObservation_ != null ? other.customObservation_.Clone() : null; + CustomObservation = other.customObservation_ != null ? 
other.CustomObservation.Clone() : null; + compressedObservations_ = other.compressedObservations_.Clone(); _unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields); } @@ -101,16 +104,6 @@ public AgentInfoProto Clone() { get { return stackedVectorObservation_; } } - /// Field number for the "visual_observations" field. - public const int VisualObservationsFieldNumber = 2; - private static readonly pb::FieldCodec _repeated_visualObservations_codec - = pb::FieldCodec.ForBytes(18); - private readonly pbc::RepeatedField visualObservations_ = new pbc::RepeatedField(); - [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public pbc::RepeatedField VisualObservations { - get { return visualObservations_; } - } - /// Field number for the "text_observation" field. public const int TextObservationFieldNumber = 3; private string textObservation_ = ""; @@ -209,15 +202,25 @@ public int Id { /// Field number for the "custom_observation" field. public const int CustomObservationFieldNumber = 12; - private global::MLAgents.CommunicatorObjects.CustomObservation customObservation_; + private global::MLAgents.CommunicatorObjects.CustomObservationProto customObservation_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public global::MLAgents.CommunicatorObjects.CustomObservation CustomObservation { + public global::MLAgents.CommunicatorObjects.CustomObservationProto CustomObservation { get { return customObservation_; } set { customObservation_ = value; } } + /// Field number for the "compressed_observations" field. 
+ public const int CompressedObservationsFieldNumber = 13; + private static readonly pb::FieldCodec _repeated_compressedObservations_codec + = pb::FieldCodec.ForMessage(106, global::MLAgents.CommunicatorObjects.CompressedObservationProto.Parser); + private readonly pbc::RepeatedField compressedObservations_ = new pbc::RepeatedField(); + [global::System.Diagnostics.DebuggerNonUserCodeAttribute] + public pbc::RepeatedField CompressedObservations { + get { return compressedObservations_; } + } + [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override bool Equals(object other) { return Equals(other as AgentInfoProto); @@ -232,7 +235,6 @@ public bool Equals(AgentInfoProto other) { return true; } if(!stackedVectorObservation_.Equals(other.stackedVectorObservation_)) return false; - if(!visualObservations_.Equals(other.visualObservations_)) return false; if (TextObservation != other.TextObservation) return false; if(!storedVectorActions_.Equals(other.storedVectorActions_)) return false; if (StoredTextActions != other.StoredTextActions) return false; @@ -243,6 +245,7 @@ public bool Equals(AgentInfoProto other) { if (Id != other.Id) return false; if(!actionMask_.Equals(other.actionMask_)) return false; if (!object.Equals(CustomObservation, other.CustomObservation)) return false; + if(!compressedObservations_.Equals(other.compressedObservations_)) return false; return Equals(_unknownFields, other._unknownFields); } @@ -250,7 +253,6 @@ public bool Equals(AgentInfoProto other) { public override int GetHashCode() { int hash = 1; hash ^= stackedVectorObservation_.GetHashCode(); - hash ^= visualObservations_.GetHashCode(); if (TextObservation.Length != 0) hash ^= TextObservation.GetHashCode(); hash ^= storedVectorActions_.GetHashCode(); if (StoredTextActions.Length != 0) hash ^= StoredTextActions.GetHashCode(); @@ -261,6 +263,7 @@ public override int GetHashCode() { if (Id != 0) hash ^= Id.GetHashCode(); hash ^= actionMask_.GetHashCode(); if (customObservation_ 
!= null) hash ^= CustomObservation.GetHashCode(); + hash ^= compressedObservations_.GetHashCode(); if (_unknownFields != null) { hash ^= _unknownFields.GetHashCode(); } @@ -275,7 +278,6 @@ public override string ToString() { [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void WriteTo(pb::CodedOutputStream output) { stackedVectorObservation_.WriteTo(output, _repeated_stackedVectorObservation_codec); - visualObservations_.WriteTo(output, _repeated_visualObservations_codec); if (TextObservation.Length != 0) { output.WriteRawTag(26); output.WriteString(TextObservation); @@ -307,6 +309,7 @@ public void WriteTo(pb::CodedOutputStream output) { output.WriteRawTag(98); output.WriteMessage(CustomObservation); } + compressedObservations_.WriteTo(output, _repeated_compressedObservations_codec); if (_unknownFields != null) { _unknownFields.WriteTo(output); } @@ -316,7 +319,6 @@ public void WriteTo(pb::CodedOutputStream output) { public int CalculateSize() { int size = 0; size += stackedVectorObservation_.CalculateSize(_repeated_stackedVectorObservation_codec); - size += visualObservations_.CalculateSize(_repeated_visualObservations_codec); if (TextObservation.Length != 0) { size += 1 + pb::CodedOutputStream.ComputeStringSize(TextObservation); } @@ -341,6 +343,7 @@ public int CalculateSize() { if (customObservation_ != null) { size += 1 + pb::CodedOutputStream.ComputeMessageSize(CustomObservation); } + size += compressedObservations_.CalculateSize(_repeated_compressedObservations_codec); if (_unknownFields != null) { size += _unknownFields.CalculateSize(); } @@ -353,7 +356,6 @@ public void MergeFrom(AgentInfoProto other) { return; } stackedVectorObservation_.Add(other.stackedVectorObservation_); - visualObservations_.Add(other.visualObservations_); if (other.TextObservation.Length != 0) { TextObservation = other.TextObservation; } @@ -377,10 +379,11 @@ public void MergeFrom(AgentInfoProto other) { actionMask_.Add(other.actionMask_); if (other.customObservation_ 
!= null) { if (customObservation_ == null) { - CustomObservation = new global::MLAgents.CommunicatorObjects.CustomObservation(); + customObservation_ = new global::MLAgents.CommunicatorObjects.CustomObservationProto(); } CustomObservation.MergeFrom(other.CustomObservation); } + compressedObservations_.Add(other.compressedObservations_); _unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields); } @@ -397,10 +400,6 @@ public void MergeFrom(pb::CodedInputStream input) { stackedVectorObservation_.AddEntriesFrom(input, _repeated_stackedVectorObservation_codec); break; } - case 18: { - visualObservations_.AddEntriesFrom(input, _repeated_visualObservations_codec); - break; - } case 26: { TextObservation = input.ReadString(); break; @@ -442,9 +441,13 @@ public void MergeFrom(pb::CodedInputStream input) { } case 98: { if (customObservation_ == null) { - CustomObservation = new global::MLAgents.CommunicatorObjects.CustomObservation(); + customObservation_ = new global::MLAgents.CommunicatorObjects.CustomObservationProto(); } - input.ReadMessage(CustomObservation); + input.ReadMessage(customObservation_); + break; + } + case 106: { + compressedObservations_.AddEntriesFrom(input, _repeated_compressedObservations_codec); break; } } diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/AgentInfo.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/AgentInfo.cs.meta new file mode 100644 index 0000000000..07ed361456 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/AgentInfo.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: ecaddd3a8141a4854a4d2c7fe8bd6a75 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/BrainParametersProto.cs 
b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/BrainParameters.cs similarity index 80% rename from UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/BrainParametersProto.cs rename to UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/BrainParameters.cs index a6c93af551..4d92bcb92c 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/BrainParametersProto.cs +++ b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/BrainParameters.cs @@ -1,6 +1,6 @@ // // Generated by the protocol buffer compiler. DO NOT EDIT! -// source: mlagents/envs/communicator_objects/brain_parameters_proto.proto +// source: mlagents/envs/communicator_objects/brain_parameters.proto // #pragma warning disable 1591, 0612, 3021 #region Designer generated code @@ -11,37 +11,34 @@ using scg = global::System.Collections.Generic; namespace MLAgents.CommunicatorObjects { - /// Holder for reflection information generated from mlagents/envs/communicator_objects/brain_parameters_proto.proto - public static partial class BrainParametersProtoReflection { + /// Holder for reflection information generated from mlagents/envs/communicator_objects/brain_parameters.proto + public static partial class BrainParametersReflection { #region Descriptor - /// File descriptor for mlagents/envs/communicator_objects/brain_parameters_proto.proto + /// File descriptor for mlagents/envs/communicator_objects/brain_parameters.proto public static pbr::FileDescriptor Descriptor { get { return descriptor; } } private static pbr::FileDescriptor descriptor; - static BrainParametersProtoReflection() { + static BrainParametersReflection() { byte[] descriptorData = global::System.Convert.FromBase64String( string.Concat( - "Cj9tbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL2JyYWluX3Bh", - "cmFtZXRlcnNfcHJvdG8ucHJvdG8SFGNvbW11bmljYXRvcl9vYmplY3RzGjlt", - "bGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL3Jlc29sdXRpb25f", - "cHJvdG8ucHJvdG8aOW1sYWdlbnRzL2VudnMvY29tbXVuaWNhdG9yX29iamVj", - 
"dHMvc3BhY2VfdHlwZV9wcm90by5wcm90byLUAgoUQnJhaW5QYXJhbWV0ZXJz", - "UHJvdG8SHwoXdmVjdG9yX29ic2VydmF0aW9uX3NpemUYASABKAUSJwofbnVt", - "X3N0YWNrZWRfdmVjdG9yX29ic2VydmF0aW9ucxgCIAEoBRIaChJ2ZWN0b3Jf", - "YWN0aW9uX3NpemUYAyADKAUSQQoSY2FtZXJhX3Jlc29sdXRpb25zGAQgAygL", - "MiUuY29tbXVuaWNhdG9yX29iamVjdHMuUmVzb2x1dGlvblByb3RvEiIKGnZl", - "Y3Rvcl9hY3Rpb25fZGVzY3JpcHRpb25zGAUgAygJEkYKGHZlY3Rvcl9hY3Rp", - "b25fc3BhY2VfdHlwZRgGIAEoDjIkLmNvbW11bmljYXRvcl9vYmplY3RzLlNw", - "YWNlVHlwZVByb3RvEhIKCmJyYWluX25hbWUYByABKAkSEwoLaXNfdHJhaW5p", - "bmcYCCABKAhCH6oCHE1MQWdlbnRzLkNvbW11bmljYXRvck9iamVjdHNiBnBy", - "b3RvMw==")); + "CjltbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL2JyYWluX3Bh", + "cmFtZXRlcnMucHJvdG8SFGNvbW11bmljYXRvcl9vYmplY3RzGjNtbGFnZW50", + "cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL3NwYWNlX3R5cGUucHJvdG8i", + "lwIKFEJyYWluUGFyYW1ldGVyc1Byb3RvEh8KF3ZlY3Rvcl9vYnNlcnZhdGlv", + "bl9zaXplGAEgASgFEicKH251bV9zdGFja2VkX3ZlY3Rvcl9vYnNlcnZhdGlv", + "bnMYAiABKAUSGgoSdmVjdG9yX2FjdGlvbl9zaXplGAMgAygFEiIKGnZlY3Rv", + "cl9hY3Rpb25fZGVzY3JpcHRpb25zGAUgAygJEkYKGHZlY3Rvcl9hY3Rpb25f", + "c3BhY2VfdHlwZRgGIAEoDjIkLmNvbW11bmljYXRvcl9vYmplY3RzLlNwYWNl", + "VHlwZVByb3RvEhIKCmJyYWluX25hbWUYByABKAkSEwoLaXNfdHJhaW5pbmcY", + "CCABKAhKBAgEEAVCH6oCHE1MQWdlbnRzLkNvbW11bmljYXRvck9iamVjdHNi", + "BnByb3RvMw==")); descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, - new pbr::FileDescriptor[] { global::MLAgents.CommunicatorObjects.ResolutionProtoReflection.Descriptor, global::MLAgents.CommunicatorObjects.SpaceTypeProtoReflection.Descriptor, }, + new pbr::FileDescriptor[] { global::MLAgents.CommunicatorObjects.SpaceTypeReflection.Descriptor, }, new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] { - new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.BrainParametersProto), global::MLAgents.CommunicatorObjects.BrainParametersProto.Parser, new[]{ "VectorObservationSize", "NumStackedVectorObservations", "VectorActionSize", "CameraResolutions", 
"VectorActionDescriptions", "VectorActionSpaceType", "BrainName", "IsTraining" }, null, null, null) + new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.BrainParametersProto), global::MLAgents.CommunicatorObjects.BrainParametersProto.Parser, new[]{ "VectorObservationSize", "NumStackedVectorObservations", "VectorActionSize", "VectorActionDescriptions", "VectorActionSpaceType", "BrainName", "IsTraining" }, null, null, null) })); } #endregion @@ -56,7 +53,7 @@ public sealed partial class BrainParametersProto : pb::IMessageField number for the "camera_resolutions" field. - public const int CameraResolutionsFieldNumber = 4; - private static readonly pb::FieldCodec _repeated_cameraResolutions_codec - = pb::FieldCodec.ForMessage(34, global::MLAgents.CommunicatorObjects.ResolutionProto.Parser); - private readonly pbc::RepeatedField cameraResolutions_ = new pbc::RepeatedField(); - [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public pbc::RepeatedField CameraResolutions { - get { return cameraResolutions_; } - } - /// Field number for the "vector_action_descriptions" field. 
public const int VectorActionDescriptionsFieldNumber = 5; private static readonly pb::FieldCodec _repeated_vectorActionDescriptions_codec @@ -190,7 +176,6 @@ public bool Equals(BrainParametersProto other) { if (VectorObservationSize != other.VectorObservationSize) return false; if (NumStackedVectorObservations != other.NumStackedVectorObservations) return false; if(!vectorActionSize_.Equals(other.vectorActionSize_)) return false; - if(!cameraResolutions_.Equals(other.cameraResolutions_)) return false; if(!vectorActionDescriptions_.Equals(other.vectorActionDescriptions_)) return false; if (VectorActionSpaceType != other.VectorActionSpaceType) return false; if (BrainName != other.BrainName) return false; @@ -204,7 +189,6 @@ public override int GetHashCode() { if (VectorObservationSize != 0) hash ^= VectorObservationSize.GetHashCode(); if (NumStackedVectorObservations != 0) hash ^= NumStackedVectorObservations.GetHashCode(); hash ^= vectorActionSize_.GetHashCode(); - hash ^= cameraResolutions_.GetHashCode(); hash ^= vectorActionDescriptions_.GetHashCode(); if (VectorActionSpaceType != 0) hash ^= VectorActionSpaceType.GetHashCode(); if (BrainName.Length != 0) hash ^= BrainName.GetHashCode(); @@ -231,7 +215,6 @@ public void WriteTo(pb::CodedOutputStream output) { output.WriteInt32(NumStackedVectorObservations); } vectorActionSize_.WriteTo(output, _repeated_vectorActionSize_codec); - cameraResolutions_.WriteTo(output, _repeated_cameraResolutions_codec); vectorActionDescriptions_.WriteTo(output, _repeated_vectorActionDescriptions_codec); if (VectorActionSpaceType != 0) { output.WriteRawTag(48); @@ -260,7 +243,6 @@ public int CalculateSize() { size += 1 + pb::CodedOutputStream.ComputeInt32Size(NumStackedVectorObservations); } size += vectorActionSize_.CalculateSize(_repeated_vectorActionSize_codec); - size += cameraResolutions_.CalculateSize(_repeated_cameraResolutions_codec); size += vectorActionDescriptions_.CalculateSize(_repeated_vectorActionDescriptions_codec); if 
(VectorActionSpaceType != 0) { size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) VectorActionSpaceType); @@ -289,7 +271,6 @@ public void MergeFrom(BrainParametersProto other) { NumStackedVectorObservations = other.NumStackedVectorObservations; } vectorActionSize_.Add(other.vectorActionSize_); - cameraResolutions_.Add(other.cameraResolutions_); vectorActionDescriptions_.Add(other.vectorActionDescriptions_); if (other.VectorActionSpaceType != 0) { VectorActionSpaceType = other.VectorActionSpaceType; @@ -324,16 +305,12 @@ public void MergeFrom(pb::CodedInputStream input) { vectorActionSize_.AddEntriesFrom(input, _repeated_vectorActionSize_codec); break; } - case 34: { - cameraResolutions_.AddEntriesFrom(input, _repeated_cameraResolutions_codec); - break; - } case 42: { vectorActionDescriptions_.AddEntriesFrom(input, _repeated_vectorActionDescriptions_codec); break; } case 48: { - VectorActionSpaceType = (global::MLAgents.CommunicatorObjects.SpaceTypeProto) input.ReadEnum(); + vectorActionSpaceType_ = (global::MLAgents.CommunicatorObjects.SpaceTypeProto) input.ReadEnum(); break; } case 58: { diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/BrainParameters.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/BrainParameters.cs.meta new file mode 100644 index 0000000000..447602fcc2 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/BrainParameters.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 26f9a93df956e4ee88c1cf5f31017f0e +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/CommandProto.cs b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/Command.cs similarity index 69% rename from UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/CommandProto.cs rename to 
UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/Command.cs index c1738800da..0d21e5b5c5 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/CommandProto.cs +++ b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/Command.cs @@ -1,6 +1,6 @@ // // Generated by the protocol buffer compiler. DO NOT EDIT! -// source: mlagents/envs/communicator_objects/command_proto.proto +// source: mlagents/envs/communicator_objects/command.proto // #pragma warning disable 1591, 0612, 3021 #region Designer generated code @@ -11,23 +11,23 @@ using scg = global::System.Collections.Generic; namespace MLAgents.CommunicatorObjects { - /// Holder for reflection information generated from mlagents/envs/communicator_objects/command_proto.proto - public static partial class CommandProtoReflection { + /// Holder for reflection information generated from mlagents/envs/communicator_objects/command.proto + public static partial class CommandReflection { #region Descriptor - /// File descriptor for mlagents/envs/communicator_objects/command_proto.proto + /// File descriptor for mlagents/envs/communicator_objects/command.proto public static pbr::FileDescriptor Descriptor { get { return descriptor; } } private static pbr::FileDescriptor descriptor; - static CommandProtoReflection() { + static CommandReflection() { byte[] descriptorData = global::System.Convert.FromBase64String( string.Concat( - "CjZtbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL2NvbW1hbmRf", - "cHJvdG8ucHJvdG8SFGNvbW11bmljYXRvcl9vYmplY3RzKi0KDENvbW1hbmRQ", - "cm90bxIICgRTVEVQEAASCQoFUkVTRVQQARIICgRRVUlUEAJCH6oCHE1MQWdl", - "bnRzLkNvbW11bmljYXRvck9iamVjdHNiBnByb3RvMw==")); + "CjBtbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL2NvbW1hbmQu", + "cHJvdG8SFGNvbW11bmljYXRvcl9vYmplY3RzKi0KDENvbW1hbmRQcm90bxII", + "CgRTVEVQEAASCQoFUkVTRVQQARIICgRRVUlUEAJCH6oCHE1MQWdlbnRzLkNv", + "bW11bmljYXRvck9iamVjdHNiBnByb3RvMw==")); descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, new pbr::FileDescriptor[] { 
}, new pbr::GeneratedClrTypeInfo(new[] {typeof(global::MLAgents.CommunicatorObjects.CommandProto), }, null)); diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/Command.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/Command.cs.meta new file mode 100644 index 0000000000..f47033a7c1 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/Command.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 9be6f5025f61540eabbc831436642adc +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/CompressedObservation.cs b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/CompressedObservation.cs new file mode 100644 index 0000000000..1b7a0f9296 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/CompressedObservation.cs @@ -0,0 +1,234 @@ +// +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: mlagents/envs/communicator_objects/compressed_observation.proto +// +#pragma warning disable 1591, 0612, 3021 +#region Designer generated code + +using pb = global::Google.Protobuf; +using pbc = global::Google.Protobuf.Collections; +using pbr = global::Google.Protobuf.Reflection; +using scg = global::System.Collections.Generic; +namespace MLAgents.CommunicatorObjects { + + /// Holder for reflection information generated from mlagents/envs/communicator_objects/compressed_observation.proto + public static partial class CompressedObservationReflection { + + #region Descriptor + /// File descriptor for mlagents/envs/communicator_objects/compressed_observation.proto + public static pbr::FileDescriptor Descriptor { + get { return descriptor; } + } + private static pbr::FileDescriptor descriptor; + + static CompressedObservationReflection() { + byte[] descriptorData = global::System.Convert.FromBase64String( + string.Concat( + "Cj9tbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL2NvbXByZXNz", + "ZWRfb2JzZXJ2YXRpb24ucHJvdG8SFGNvbW11bmljYXRvcl9vYmplY3RzIn8K", + "GkNvbXByZXNzZWRPYnNlcnZhdGlvblByb3RvEg0KBXNoYXBlGAEgAygFEkQK", + "EGNvbXByZXNzaW9uX3R5cGUYAiABKA4yKi5jb21tdW5pY2F0b3Jfb2JqZWN0", + "cy5Db21wcmVzc2lvblR5cGVQcm90bxIMCgRkYXRhGAMgASgMKikKFENvbXBy", + "ZXNzaW9uVHlwZVByb3RvEggKBE5PTkUQABIHCgNQTkcQAUIfqgIcTUxBZ2Vu", + "dHMuQ29tbXVuaWNhdG9yT2JqZWN0c2IGcHJvdG8z")); + descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, + new pbr::FileDescriptor[] { }, + new pbr::GeneratedClrTypeInfo(new[] {typeof(global::MLAgents.CommunicatorObjects.CompressionTypeProto), }, new pbr::GeneratedClrTypeInfo[] { + new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.CompressedObservationProto), global::MLAgents.CommunicatorObjects.CompressedObservationProto.Parser, new[]{ "Shape", "CompressionType", "Data" }, null, null, null) + })); + } + #endregion + + } + #region Enums + public enum CompressionTypeProto { + [pbr::OriginalName("NONE")] None = 0, + 
[pbr::OriginalName("PNG")] Png = 1, + } + + #endregion + + #region Messages + public sealed partial class CompressedObservationProto : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new CompressedObservationProto()); + private pb::UnknownFieldSet _unknownFields; + [global::System.Diagnostics.DebuggerNonUserCodeAttribute] + public static pb::MessageParser Parser { get { return _parser; } } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute] + public static pbr::MessageDescriptor Descriptor { + get { return global::MLAgents.CommunicatorObjects.CompressedObservationReflection.Descriptor.MessageTypes[0]; } + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute] + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute] + public CompressedObservationProto() { + OnConstruction(); + } + + partial void OnConstruction(); + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute] + public CompressedObservationProto(CompressedObservationProto other) : this() { + shape_ = other.shape_.Clone(); + compressionType_ = other.compressionType_; + data_ = other.data_; + _unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields); + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute] + public CompressedObservationProto Clone() { + return new CompressedObservationProto(this); + } + + /// Field number for the "shape" field. + public const int ShapeFieldNumber = 1; + private static readonly pb::FieldCodec _repeated_shape_codec + = pb::FieldCodec.ForInt32(10); + private readonly pbc::RepeatedField shape_ = new pbc::RepeatedField(); + [global::System.Diagnostics.DebuggerNonUserCodeAttribute] + public pbc::RepeatedField Shape { + get { return shape_; } + } + + /// Field number for the "compression_type" field. 
+ public const int CompressionTypeFieldNumber = 2; + private global::MLAgents.CommunicatorObjects.CompressionTypeProto compressionType_ = 0; + [global::System.Diagnostics.DebuggerNonUserCodeAttribute] + public global::MLAgents.CommunicatorObjects.CompressionTypeProto CompressionType { + get { return compressionType_; } + set { + compressionType_ = value; + } + } + + /// Field number for the "data" field. + public const int DataFieldNumber = 3; + private pb::ByteString data_ = pb::ByteString.Empty; + [global::System.Diagnostics.DebuggerNonUserCodeAttribute] + public pb::ByteString Data { + get { return data_; } + set { + data_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute] + public override bool Equals(object other) { + return Equals(other as CompressedObservationProto); + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute] + public bool Equals(CompressedObservationProto other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if(!shape_.Equals(other.shape_)) return false; + if (CompressionType != other.CompressionType) return false; + if (Data != other.Data) return false; + return Equals(_unknownFields, other._unknownFields); + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute] + public override int GetHashCode() { + int hash = 1; + hash ^= shape_.GetHashCode(); + if (CompressionType != 0) hash ^= CompressionType.GetHashCode(); + if (Data.Length != 0) hash ^= Data.GetHashCode(); + if (_unknownFields != null) { + hash ^= _unknownFields.GetHashCode(); + } + return hash; + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute] + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute] + public void WriteTo(pb::CodedOutputStream output) { + shape_.WriteTo(output, _repeated_shape_codec); + if 
(CompressionType != 0) { + output.WriteRawTag(16); + output.WriteEnum((int) CompressionType); + } + if (Data.Length != 0) { + output.WriteRawTag(26); + output.WriteBytes(Data); + } + if (_unknownFields != null) { + _unknownFields.WriteTo(output); + } + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute] + public int CalculateSize() { + int size = 0; + size += shape_.CalculateSize(_repeated_shape_codec); + if (CompressionType != 0) { + size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) CompressionType); + } + if (Data.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeBytesSize(Data); + } + if (_unknownFields != null) { + size += _unknownFields.CalculateSize(); + } + return size; + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute] + public void MergeFrom(CompressedObservationProto other) { + if (other == null) { + return; + } + shape_.Add(other.shape_); + if (other.CompressionType != 0) { + CompressionType = other.CompressionType; + } + if (other.Data.Length != 0) { + Data = other.Data; + } + _unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields); + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute] + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + _unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input); + break; + case 10: + case 8: { + shape_.AddEntriesFrom(input, _repeated_shape_codec); + break; + } + case 16: { + compressionType_ = (global::MLAgents.CommunicatorObjects.CompressionTypeProto) input.ReadEnum(); + break; + } + case 26: { + Data = input.ReadBytes(); + break; + } + } + } + } + + } + + #endregion + +} + +#endregion Designer generated code diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/CompressedObservation.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/CompressedObservation.cs.meta new file mode 100644 index 
0000000000..8bb01e7651 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/CompressedObservation.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 55ac40ee8d5b74b9e80d3def9d4ef6e0 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/CustomAction.cs b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/CustomAction.cs similarity index 81% rename from UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/CustomAction.cs rename to UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/CustomAction.cs index 4053f0beb3..fe98b8d171 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/CustomAction.cs +++ b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/CustomAction.cs @@ -25,23 +25,24 @@ static CustomActionReflection() { byte[] descriptorData = global::System.Convert.FromBase64String( string.Concat( "CjZtbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL2N1c3RvbV9h", - "Y3Rpb24ucHJvdG8SFGNvbW11bmljYXRvcl9vYmplY3RzIg4KDEN1c3RvbUFj", - "dGlvbkIfqgIcTUxBZ2VudHMuQ29tbXVuaWNhdG9yT2JqZWN0c2IGcHJvdG8z")); + "Y3Rpb24ucHJvdG8SFGNvbW11bmljYXRvcl9vYmplY3RzIhMKEUN1c3RvbUFj", + "dGlvblByb3RvQh+qAhxNTEFnZW50cy5Db21tdW5pY2F0b3JPYmplY3RzYgZw", + "cm90bzM=")); descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, new pbr::FileDescriptor[] { }, new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] { - new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.CustomAction), global::MLAgents.CommunicatorObjects.CustomAction.Parser, null, null, null, null) + new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.CustomActionProto), global::MLAgents.CommunicatorObjects.CustomActionProto.Parser, null, null, null, null) })); } #endregion } #region Messages - public sealed partial 
class CustomAction : pb::IMessage { - private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new CustomAction()); + public sealed partial class CustomActionProto : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new CustomActionProto()); private pb::UnknownFieldSet _unknownFields; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public static pb::MessageParser Parser { get { return _parser; } } + public static pb::MessageParser Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pbr::MessageDescriptor Descriptor { @@ -54,29 +55,29 @@ public sealed partial class CustomAction : pb::IMessage { } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public CustomAction() { + public CustomActionProto() { OnConstruction(); } partial void OnConstruction(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public CustomAction(CustomAction other) : this() { + public CustomActionProto(CustomActionProto other) : this() { _unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public CustomAction Clone() { - return new CustomAction(this); + public CustomActionProto Clone() { + return new CustomActionProto(this); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override bool Equals(object other) { - return Equals(other as CustomAction); + return Equals(other as CustomActionProto); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public bool Equals(CustomAction other) { + public bool Equals(CustomActionProto other) { if (ReferenceEquals(other, null)) { return false; } @@ -117,7 +118,7 @@ public int CalculateSize() { } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public void MergeFrom(CustomAction other) { + public void MergeFrom(CustomActionProto other) { if (other == null) { return; } diff --git 
a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/CustomAction.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/CustomAction.cs.meta similarity index 83% rename from UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/CustomAction.cs.meta rename to UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/CustomAction.cs.meta index d2a21e3758..3c1bc85d1d 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/CustomAction.cs.meta +++ b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/CustomAction.cs.meta @@ -1,5 +1,5 @@ fileFormatVersion: 2 -guid: a8d11b50ed9ce45f7827f5117b65db06 +guid: cc39771cc6e944eaaafb44e2da960a65 MonoImporter: externalObjects: {} serializedVersion: 2 diff --git a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/CustomObservation.cs b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/CustomObservation.cs similarity index 81% rename from UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/CustomObservation.cs rename to UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/CustomObservation.cs index c6a5af3a26..05770841b6 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/CustomObservation.cs +++ b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/CustomObservation.cs @@ -25,24 +25,24 @@ static CustomObservationReflection() { byte[] descriptorData = global::System.Convert.FromBase64String( string.Concat( "CjttbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL2N1c3RvbV9v", - "YnNlcnZhdGlvbi5wcm90bxIUY29tbXVuaWNhdG9yX29iamVjdHMiEwoRQ3Vz", - "dG9tT2JzZXJ2YXRpb25CH6oCHE1MQWdlbnRzLkNvbW11bmljYXRvck9iamVj", - "dHNiBnByb3RvMw==")); + "YnNlcnZhdGlvbi5wcm90bxIUY29tbXVuaWNhdG9yX29iamVjdHMiGAoWQ3Vz", + "dG9tT2JzZXJ2YXRpb25Qcm90b0IfqgIcTUxBZ2VudHMuQ29tbXVuaWNhdG9y", + "T2JqZWN0c2IGcHJvdG8z")); descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, new pbr::FileDescriptor[] { }, new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] { - new 
pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.CustomObservation), global::MLAgents.CommunicatorObjects.CustomObservation.Parser, null, null, null, null) + new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.CustomObservationProto), global::MLAgents.CommunicatorObjects.CustomObservationProto.Parser, null, null, null, null) })); } #endregion } #region Messages - public sealed partial class CustomObservation : pb::IMessage { - private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new CustomObservation()); + public sealed partial class CustomObservationProto : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new CustomObservationProto()); private pb::UnknownFieldSet _unknownFields; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public static pb::MessageParser Parser { get { return _parser; } } + public static pb::MessageParser Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pbr::MessageDescriptor Descriptor { @@ -55,29 +55,29 @@ public sealed partial class CustomObservation : pb::IMessage } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public CustomObservation() { + public CustomObservationProto() { OnConstruction(); } partial void OnConstruction(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public CustomObservation(CustomObservation other) : this() { + public CustomObservationProto(CustomObservationProto other) : this() { _unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public CustomObservation Clone() { - return new CustomObservation(this); + public CustomObservationProto Clone() { + return new CustomObservationProto(this); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override bool Equals(object other) { - return Equals(other as CustomObservation); 
+ return Equals(other as CustomObservationProto); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public bool Equals(CustomObservation other) { + public bool Equals(CustomObservationProto other) { if (ReferenceEquals(other, null)) { return false; } @@ -118,7 +118,7 @@ public int CalculateSize() { } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public void MergeFrom(CustomObservation other) { + public void MergeFrom(CustomObservationProto other) { if (other == null) { return; } diff --git a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/CustomObservation.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/CustomObservation.cs.meta similarity index 83% rename from UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/CustomObservation.cs.meta rename to UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/CustomObservation.cs.meta index 315c27bd79..d0dc127a0a 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/CustomObservation.cs.meta +++ b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/CustomObservation.cs.meta @@ -1,5 +1,5 @@ fileFormatVersion: 2 -guid: 896847c1364a7475d9094058ff93b7f0 +guid: 186aa820efd71454db6e4cb7b883dce5 MonoImporter: externalObjects: {} serializedVersion: 2 diff --git a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/CustomResetParameters.cs b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/CustomResetParameters.cs similarity index 81% rename from UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/CustomResetParameters.cs rename to UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/CustomResetParameters.cs index 6a50d6548d..8c217b2e48 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/CustomResetParameters.cs +++ b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/CustomResetParameters.cs @@ -25,24 +25,24 @@ static CustomResetParametersReflection() { byte[] descriptorData = 
global::System.Convert.FromBase64String( string.Concat( "CkBtbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL2N1c3RvbV9y", - "ZXNldF9wYXJhbWV0ZXJzLnByb3RvEhRjb21tdW5pY2F0b3Jfb2JqZWN0cyIX", - "ChVDdXN0b21SZXNldFBhcmFtZXRlcnNCH6oCHE1MQWdlbnRzLkNvbW11bmlj", - "YXRvck9iamVjdHNiBnByb3RvMw==")); + "ZXNldF9wYXJhbWV0ZXJzLnByb3RvEhRjb21tdW5pY2F0b3Jfb2JqZWN0cyIc", + "ChpDdXN0b21SZXNldFBhcmFtZXRlcnNQcm90b0IfqgIcTUxBZ2VudHMuQ29t", + "bXVuaWNhdG9yT2JqZWN0c2IGcHJvdG8z")); descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, new pbr::FileDescriptor[] { }, new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] { - new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.CustomResetParameters), global::MLAgents.CommunicatorObjects.CustomResetParameters.Parser, null, null, null, null) + new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.CustomResetParametersProto), global::MLAgents.CommunicatorObjects.CustomResetParametersProto.Parser, null, null, null, null) })); } #endregion } #region Messages - public sealed partial class CustomResetParameters : pb::IMessage { - private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new CustomResetParameters()); + public sealed partial class CustomResetParametersProto : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new CustomResetParametersProto()); private pb::UnknownFieldSet _unknownFields; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public static pb::MessageParser Parser { get { return _parser; } } + public static pb::MessageParser Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pbr::MessageDescriptor Descriptor { @@ -55,29 +55,29 @@ public sealed partial class CustomResetParameters : pb::IMessage // Generated by the protocol buffer compiler. DO NOT EDIT! 
-// source: mlagents/envs/communicator_objects/demonstration_meta_proto.proto +// source: mlagents/envs/communicator_objects/demonstration_meta.proto // #pragma warning disable 1591, 0612, 3021 #region Designer generated code @@ -11,26 +11,25 @@ using scg = global::System.Collections.Generic; namespace MLAgents.CommunicatorObjects { - /// Holder for reflection information generated from mlagents/envs/communicator_objects/demonstration_meta_proto.proto - public static partial class DemonstrationMetaProtoReflection { + /// Holder for reflection information generated from mlagents/envs/communicator_objects/demonstration_meta.proto + public static partial class DemonstrationMetaReflection { #region Descriptor - /// File descriptor for mlagents/envs/communicator_objects/demonstration_meta_proto.proto + /// File descriptor for mlagents/envs/communicator_objects/demonstration_meta.proto public static pbr::FileDescriptor Descriptor { get { return descriptor; } } private static pbr::FileDescriptor descriptor; - static DemonstrationMetaProtoReflection() { + static DemonstrationMetaReflection() { byte[] descriptorData = global::System.Convert.FromBase64String( string.Concat( - "CkFtbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL2RlbW9uc3Ry", - "YXRpb25fbWV0YV9wcm90by5wcm90bxIUY29tbXVuaWNhdG9yX29iamVjdHMi", - "jQEKFkRlbW9uc3RyYXRpb25NZXRhUHJvdG8SEwoLYXBpX3ZlcnNpb24YASAB", - "KAUSGgoSZGVtb25zdHJhdGlvbl9uYW1lGAIgASgJEhQKDG51bWJlcl9zdGVw", - "cxgDIAEoBRIXCg9udW1iZXJfZXBpc29kZXMYBCABKAUSEwoLbWVhbl9yZXdh", - "cmQYBSABKAJCH6oCHE1MQWdlbnRzLkNvbW11bmljYXRvck9iamVjdHNiBnBy", - "b3RvMw==")); + "CjttbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL2RlbW9uc3Ry", + "YXRpb25fbWV0YS5wcm90bxIUY29tbXVuaWNhdG9yX29iamVjdHMijQEKFkRl", + "bW9uc3RyYXRpb25NZXRhUHJvdG8SEwoLYXBpX3ZlcnNpb24YASABKAUSGgoS", + "ZGVtb25zdHJhdGlvbl9uYW1lGAIgASgJEhQKDG51bWJlcl9zdGVwcxgDIAEo", + "BRIXCg9udW1iZXJfZXBpc29kZXMYBCABKAUSEwoLbWVhbl9yZXdhcmQYBSAB", + "KAJCH6oCHE1MQWdlbnRzLkNvbW11bmljYXRvck9iamVjdHNiBnByb3RvMw==")); 
descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, new pbr::FileDescriptor[] { }, new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] { @@ -49,7 +48,7 @@ public sealed partial class DemonstrationMetaProto : pb::IMessage // Generated by the protocol buffer compiler. DO NOT EDIT! -// source: mlagents/envs/communicator_objects/engine_configuration_proto.proto +// source: mlagents/envs/communicator_objects/engine_configuration.proto // #pragma warning disable 1591, 0612, 3021 #region Designer generated code @@ -11,26 +11,26 @@ using scg = global::System.Collections.Generic; namespace MLAgents.CommunicatorObjects { - /// Holder for reflection information generated from mlagents/envs/communicator_objects/engine_configuration_proto.proto - public static partial class EngineConfigurationProtoReflection { + /// Holder for reflection information generated from mlagents/envs/communicator_objects/engine_configuration.proto + public static partial class EngineConfigurationReflection { #region Descriptor - /// File descriptor for mlagents/envs/communicator_objects/engine_configuration_proto.proto + /// File descriptor for mlagents/envs/communicator_objects/engine_configuration.proto public static pbr::FileDescriptor Descriptor { get { return descriptor; } } private static pbr::FileDescriptor descriptor; - static EngineConfigurationProtoReflection() { + static EngineConfigurationReflection() { byte[] descriptorData = global::System.Convert.FromBase64String( string.Concat( - "CkNtbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL2VuZ2luZV9j", - "b25maWd1cmF0aW9uX3Byb3RvLnByb3RvEhRjb21tdW5pY2F0b3Jfb2JqZWN0", - "cyKVAQoYRW5naW5lQ29uZmlndXJhdGlvblByb3RvEg0KBXdpZHRoGAEgASgF", - "Eg4KBmhlaWdodBgCIAEoBRIVCg1xdWFsaXR5X2xldmVsGAMgASgFEhIKCnRp", - "bWVfc2NhbGUYBCABKAISGQoRdGFyZ2V0X2ZyYW1lX3JhdGUYBSABKAUSFAoM", - "c2hvd19tb25pdG9yGAYgASgIQh+qAhxNTEFnZW50cy5Db21tdW5pY2F0b3JP", - "YmplY3RzYgZwcm90bzM=")); + 
"Cj1tbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL2VuZ2luZV9j", + "b25maWd1cmF0aW9uLnByb3RvEhRjb21tdW5pY2F0b3Jfb2JqZWN0cyKVAQoY", + "RW5naW5lQ29uZmlndXJhdGlvblByb3RvEg0KBXdpZHRoGAEgASgFEg4KBmhl", + "aWdodBgCIAEoBRIVCg1xdWFsaXR5X2xldmVsGAMgASgFEhIKCnRpbWVfc2Nh", + "bGUYBCABKAISGQoRdGFyZ2V0X2ZyYW1lX3JhdGUYBSABKAUSFAoMc2hvd19t", + "b25pdG9yGAYgASgIQh+qAhxNTEFnZW50cy5Db21tdW5pY2F0b3JPYmplY3Rz", + "YgZwcm90bzM=")); descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, new pbr::FileDescriptor[] { }, new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] { @@ -49,7 +49,7 @@ public sealed partial class EngineConfigurationProto : pb::IMessage // Generated by the protocol buffer compiler. DO NOT EDIT! -// source: mlagents/envs/communicator_objects/environment_parameters_proto.proto +// source: mlagents/envs/communicator_objects/environment_parameters.proto // #pragma warning disable 1591, 0612, 3021 #region Designer generated code @@ -11,30 +11,30 @@ using scg = global::System.Collections.Generic; namespace MLAgents.CommunicatorObjects { - /// Holder for reflection information generated from mlagents/envs/communicator_objects/environment_parameters_proto.proto - public static partial class EnvironmentParametersProtoReflection { + /// Holder for reflection information generated from mlagents/envs/communicator_objects/environment_parameters.proto + public static partial class EnvironmentParametersReflection { #region Descriptor - /// File descriptor for mlagents/envs/communicator_objects/environment_parameters_proto.proto + /// File descriptor for mlagents/envs/communicator_objects/environment_parameters.proto public static pbr::FileDescriptor Descriptor { get { return descriptor; } } private static pbr::FileDescriptor descriptor; - static EnvironmentParametersProtoReflection() { + static EnvironmentParametersReflection() { byte[] descriptorData = global::System.Convert.FromBase64String( string.Concat( - 
"CkVtbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL2Vudmlyb25t", - "ZW50X3BhcmFtZXRlcnNfcHJvdG8ucHJvdG8SFGNvbW11bmljYXRvcl9vYmpl", - "Y3RzGkBtbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL2N1c3Rv", - "bV9yZXNldF9wYXJhbWV0ZXJzLnByb3RvIoMCChpFbnZpcm9ubWVudFBhcmFt", - "ZXRlcnNQcm90bxJfChBmbG9hdF9wYXJhbWV0ZXJzGAEgAygLMkUuY29tbXVu", - "aWNhdG9yX29iamVjdHMuRW52aXJvbm1lbnRQYXJhbWV0ZXJzUHJvdG8uRmxv", - "YXRQYXJhbWV0ZXJzRW50cnkSTAoXY3VzdG9tX3Jlc2V0X3BhcmFtZXRlcnMY", - "AiABKAsyKy5jb21tdW5pY2F0b3Jfb2JqZWN0cy5DdXN0b21SZXNldFBhcmFt", - "ZXRlcnMaNgoURmxvYXRQYXJhbWV0ZXJzRW50cnkSCwoDa2V5GAEgASgJEg0K", - "BXZhbHVlGAIgASgCOgI4AUIfqgIcTUxBZ2VudHMuQ29tbXVuaWNhdG9yT2Jq", - "ZWN0c2IGcHJvdG8z")); + "Cj9tbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL2Vudmlyb25t", + "ZW50X3BhcmFtZXRlcnMucHJvdG8SFGNvbW11bmljYXRvcl9vYmplY3RzGkBt", + "bGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL2N1c3RvbV9yZXNl", + "dF9wYXJhbWV0ZXJzLnByb3RvIogCChpFbnZpcm9ubWVudFBhcmFtZXRlcnNQ", + "cm90bxJfChBmbG9hdF9wYXJhbWV0ZXJzGAEgAygLMkUuY29tbXVuaWNhdG9y", + "X29iamVjdHMuRW52aXJvbm1lbnRQYXJhbWV0ZXJzUHJvdG8uRmxvYXRQYXJh", + "bWV0ZXJzRW50cnkSUQoXY3VzdG9tX3Jlc2V0X3BhcmFtZXRlcnMYAiABKAsy", + "MC5jb21tdW5pY2F0b3Jfb2JqZWN0cy5DdXN0b21SZXNldFBhcmFtZXRlcnNQ", + "cm90bxo2ChRGbG9hdFBhcmFtZXRlcnNFbnRyeRILCgNrZXkYASABKAkSDQoF", + "dmFsdWUYAiABKAI6AjgBQh+qAhxNTEFnZW50cy5Db21tdW5pY2F0b3JPYmpl", + "Y3RzYgZwcm90bzM=")); descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, new pbr::FileDescriptor[] { global::MLAgents.CommunicatorObjects.CustomResetParametersReflection.Descriptor, }, new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] { @@ -53,7 +53,7 @@ public sealed partial class EnvironmentParametersProto : pb::IMessageField number for the "custom_reset_parameters" field. 
public const int CustomResetParametersFieldNumber = 2; - private global::MLAgents.CommunicatorObjects.CustomResetParameters customResetParameters_; + private global::MLAgents.CommunicatorObjects.CustomResetParametersProto customResetParameters_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public global::MLAgents.CommunicatorObjects.CustomResetParameters CustomResetParameters { + public global::MLAgents.CommunicatorObjects.CustomResetParametersProto CustomResetParameters { get { return customResetParameters_; } set { customResetParameters_ = value; @@ -168,7 +168,7 @@ public void MergeFrom(EnvironmentParametersProto other) { floatParameters_.Add(other.floatParameters_); if (other.customResetParameters_ != null) { if (customResetParameters_ == null) { - CustomResetParameters = new global::MLAgents.CommunicatorObjects.CustomResetParameters(); + customResetParameters_ = new global::MLAgents.CommunicatorObjects.CustomResetParametersProto(); } CustomResetParameters.MergeFrom(other.CustomResetParameters); } @@ -189,9 +189,9 @@ public void MergeFrom(pb::CodedInputStream input) { } case 18: { if (customResetParameters_ == null) { - CustomResetParameters = new global::MLAgents.CommunicatorObjects.CustomResetParameters(); + customResetParameters_ = new global::MLAgents.CommunicatorObjects.CustomResetParametersProto(); } - input.ReadMessage(CustomResetParameters); + input.ReadMessage(customResetParameters_); break; } } diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/EnvironmentParameters.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/EnvironmentParameters.cs.meta new file mode 100644 index 0000000000..71c37a573b --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/EnvironmentParameters.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 8b4c58a64d6a94f579774322ef683b17 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: 
{instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/Header.cs b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/Header.cs similarity index 86% rename from UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/Header.cs rename to UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/Header.cs index 46bd73b2c1..a0ee8f36ad 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/Header.cs +++ b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/Header.cs @@ -25,24 +25,24 @@ static HeaderReflection() { byte[] descriptorData = global::System.Convert.FromBase64String( string.Concat( "Ci9tbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL2hlYWRlci5w", - "cm90bxIUY29tbXVuaWNhdG9yX29iamVjdHMiKQoGSGVhZGVyEg4KBnN0YXR1", - "cxgBIAEoBRIPCgdtZXNzYWdlGAIgASgJQh+qAhxNTEFnZW50cy5Db21tdW5p", - "Y2F0b3JPYmplY3RzYgZwcm90bzM=")); + "cm90bxIUY29tbXVuaWNhdG9yX29iamVjdHMiLgoLSGVhZGVyUHJvdG8SDgoG", + "c3RhdHVzGAEgASgFEg8KB21lc3NhZ2UYAiABKAlCH6oCHE1MQWdlbnRzLkNv", + "bW11bmljYXRvck9iamVjdHNiBnByb3RvMw==")); descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, new pbr::FileDescriptor[] { }, new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] { - new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.Header), global::MLAgents.CommunicatorObjects.Header.Parser, new[]{ "Status", "Message" }, null, null, null) + new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.HeaderProto), global::MLAgents.CommunicatorObjects.HeaderProto.Parser, new[]{ "Status", "Message" }, null, null, null) })); } #endregion } #region Messages - public sealed partial class Header : pb::IMessage
{ - private static readonly pb::MessageParser
_parser = new pb::MessageParser
(() => new Header()); + public sealed partial class HeaderProto : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new HeaderProto()); private pb::UnknownFieldSet _unknownFields; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public static pb::MessageParser
Parser { get { return _parser; } } + public static pb::MessageParser Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pbr::MessageDescriptor Descriptor { @@ -55,22 +55,22 @@ public sealed partial class Header : pb::IMessage
{ } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public Header() { + public HeaderProto() { OnConstruction(); } partial void OnConstruction(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public Header(Header other) : this() { + public HeaderProto(HeaderProto other) : this() { status_ = other.status_; message_ = other.message_; _unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public Header Clone() { - return new Header(this); + public HeaderProto Clone() { + return new HeaderProto(this); } /// Field number for the "status" field. @@ -97,11 +97,11 @@ public string Message { [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override bool Equals(object other) { - return Equals(other as Header); + return Equals(other as HeaderProto); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public bool Equals(Header other) { + public bool Equals(HeaderProto other) { if (ReferenceEquals(other, null)) { return false; } @@ -160,7 +160,7 @@ public int CalculateSize() { } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public void MergeFrom(Header other) { + public void MergeFrom(HeaderProto other) { if (other == null) { return; } diff --git a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/Header.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/Header.cs.meta similarity index 83% rename from UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/Header.cs.meta rename to UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/Header.cs.meta index 63d91fcc8a..3084742c95 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/Header.cs.meta +++ b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/Header.cs.meta @@ -1,5 +1,5 @@ fileFormatVersion: 2 -guid: 8bb8aabfab48b408381733bccccd5af9 +guid: 870996bd75a1a4fbcbb120b1e1e66c37 MonoImporter: externalObjects: {} serializedVersion: 2 diff --git 
a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/SpaceTypeProto.cs b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/SpaceType.cs similarity index 60% rename from UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/SpaceTypeProto.cs rename to UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/SpaceType.cs index 8990b9390c..6b37621f98 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/SpaceTypeProto.cs +++ b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/SpaceType.cs @@ -1,6 +1,6 @@ // // Generated by the protocol buffer compiler. DO NOT EDIT! -// source: mlagents/envs/communicator_objects/space_type_proto.proto +// source: mlagents/envs/communicator_objects/space_type.proto // #pragma warning disable 1591, 0612, 3021 #region Designer generated code @@ -11,27 +11,25 @@ using scg = global::System.Collections.Generic; namespace MLAgents.CommunicatorObjects { - /// Holder for reflection information generated from mlagents/envs/communicator_objects/space_type_proto.proto - public static partial class SpaceTypeProtoReflection { + /// Holder for reflection information generated from mlagents/envs/communicator_objects/space_type.proto + public static partial class SpaceTypeReflection { #region Descriptor - /// File descriptor for mlagents/envs/communicator_objects/space_type_proto.proto + /// File descriptor for mlagents/envs/communicator_objects/space_type.proto public static pbr::FileDescriptor Descriptor { get { return descriptor; } } private static pbr::FileDescriptor descriptor; - static SpaceTypeProtoReflection() { + static SpaceTypeReflection() { byte[] descriptorData = global::System.Convert.FromBase64String( string.Concat( - "CjltbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL3NwYWNlX3R5", - "cGVfcHJvdG8ucHJvdG8SFGNvbW11bmljYXRvcl9vYmplY3RzGjltbGFnZW50", - "cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL3Jlc29sdXRpb25fcHJvdG8u", - "cHJvdG8qLgoOU3BhY2VUeXBlUHJvdG8SDAoIZGlzY3JldGUQABIOCgpjb250", - 
"aW51b3VzEAFCH6oCHE1MQWdlbnRzLkNvbW11bmljYXRvck9iamVjdHNiBnBy", - "b3RvMw==")); + "CjNtbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL3NwYWNlX3R5", + "cGUucHJvdG8SFGNvbW11bmljYXRvcl9vYmplY3RzKi4KDlNwYWNlVHlwZVBy", + "b3RvEgwKCGRpc2NyZXRlEAASDgoKY29udGludW91cxABQh+qAhxNTEFnZW50", + "cy5Db21tdW5pY2F0b3JPYmplY3RzYgZwcm90bzM=")); descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, - new pbr::FileDescriptor[] { global::MLAgents.CommunicatorObjects.ResolutionProtoReflection.Descriptor, }, + new pbr::FileDescriptor[] { }, new pbr::GeneratedClrTypeInfo(new[] {typeof(global::MLAgents.CommunicatorObjects.SpaceTypeProto), }, null)); } #endregion diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/SpaceType.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/SpaceType.cs.meta new file mode 100644 index 0000000000..7b6ada73ea --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/SpaceType.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 3934602aadbe9471ca973685059ef04a +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/UnityInput.cs b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/UnityInput.cs similarity index 75% rename from UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/UnityInput.cs rename to UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/UnityInput.cs index 8bad282ab3..a1ec83ac1c 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/UnityInput.cs +++ b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/UnityInput.cs @@ -28,26 +28,27 @@ static UnityInputReflection() { "cHV0LnByb3RvEhRjb21tdW5pY2F0b3Jfb2JqZWN0cxo3bWxhZ2VudHMvZW52", "cy9jb21tdW5pY2F0b3Jfb2JqZWN0cy91bml0eV9ybF9pbnB1dC5wcm90bxpG", 
"bWxhZ2VudHMvZW52cy9jb21tdW5pY2F0b3Jfb2JqZWN0cy91bml0eV9ybF9p", - "bml0aWFsaXphdGlvbl9pbnB1dC5wcm90byKVAQoKVW5pdHlJbnB1dBI0Cghy", - "bF9pbnB1dBgBIAEoCzIiLmNvbW11bmljYXRvcl9vYmplY3RzLlVuaXR5UkxJ", - "bnB1dBJRChdybF9pbml0aWFsaXphdGlvbl9pbnB1dBgCIAEoCzIwLmNvbW11", - "bmljYXRvcl9vYmplY3RzLlVuaXR5UkxJbml0aWFsaXphdGlvbklucHV0Qh+q", - "AhxNTEFnZW50cy5Db21tdW5pY2F0b3JPYmplY3RzYgZwcm90bzM=")); + "bml0aWFsaXphdGlvbl9pbnB1dC5wcm90byKkAQoPVW5pdHlJbnB1dFByb3Rv", + "EjkKCHJsX2lucHV0GAEgASgLMicuY29tbXVuaWNhdG9yX29iamVjdHMuVW5p", + "dHlSTElucHV0UHJvdG8SVgoXcmxfaW5pdGlhbGl6YXRpb25faW5wdXQYAiAB", + "KAsyNS5jb21tdW5pY2F0b3Jfb2JqZWN0cy5Vbml0eVJMSW5pdGlhbGl6YXRp", + "b25JbnB1dFByb3RvQh+qAhxNTEFnZW50cy5Db21tdW5pY2F0b3JPYmplY3Rz", + "YgZwcm90bzM=")); descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, new pbr::FileDescriptor[] { global::MLAgents.CommunicatorObjects.UnityRlInputReflection.Descriptor, global::MLAgents.CommunicatorObjects.UnityRlInitializationInputReflection.Descriptor, }, new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] { - new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.UnityInput), global::MLAgents.CommunicatorObjects.UnityInput.Parser, new[]{ "RlInput", "RlInitializationInput" }, null, null, null) + new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.UnityInputProto), global::MLAgents.CommunicatorObjects.UnityInputProto.Parser, new[]{ "RlInput", "RlInitializationInput" }, null, null, null) })); } #endregion } #region Messages - public sealed partial class UnityInput : pb::IMessage { - private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new UnityInput()); + public sealed partial class UnityInputProto : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new UnityInputProto()); private pb::UnknownFieldSet _unknownFields; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public static pb::MessageParser Parser { 
get { return _parser; } } + public static pb::MessageParser Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pbr::MessageDescriptor Descriptor { @@ -60,29 +61,29 @@ public sealed partial class UnityInput : pb::IMessage { } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public UnityInput() { + public UnityInputProto() { OnConstruction(); } partial void OnConstruction(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public UnityInput(UnityInput other) : this() { - rlInput_ = other.rlInput_ != null ? other.rlInput_.Clone() : null; - rlInitializationInput_ = other.rlInitializationInput_ != null ? other.rlInitializationInput_.Clone() : null; + public UnityInputProto(UnityInputProto other) : this() { + RlInput = other.rlInput_ != null ? other.RlInput.Clone() : null; + RlInitializationInput = other.rlInitializationInput_ != null ? other.RlInitializationInput.Clone() : null; _unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public UnityInput Clone() { - return new UnityInput(this); + public UnityInputProto Clone() { + return new UnityInputProto(this); } /// Field number for the "rl_input" field. public const int RlInputFieldNumber = 1; - private global::MLAgents.CommunicatorObjects.UnityRLInput rlInput_; + private global::MLAgents.CommunicatorObjects.UnityRLInputProto rlInput_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public global::MLAgents.CommunicatorObjects.UnityRLInput RlInput { + public global::MLAgents.CommunicatorObjects.UnityRLInputProto RlInput { get { return rlInput_; } set { rlInput_ = value; @@ -91,9 +92,9 @@ public UnityInput Clone() { /// Field number for the "rl_initialization_input" field. 
public const int RlInitializationInputFieldNumber = 2; - private global::MLAgents.CommunicatorObjects.UnityRLInitializationInput rlInitializationInput_; + private global::MLAgents.CommunicatorObjects.UnityRLInitializationInputProto rlInitializationInput_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public global::MLAgents.CommunicatorObjects.UnityRLInitializationInput RlInitializationInput { + public global::MLAgents.CommunicatorObjects.UnityRLInitializationInputProto RlInitializationInput { get { return rlInitializationInput_; } set { rlInitializationInput_ = value; @@ -102,11 +103,11 @@ public UnityInput Clone() { [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override bool Equals(object other) { - return Equals(other as UnityInput); + return Equals(other as UnityInputProto); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public bool Equals(UnityInput other) { + public bool Equals(UnityInputProto other) { if (ReferenceEquals(other, null)) { return false; } @@ -165,19 +166,19 @@ public int CalculateSize() { } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public void MergeFrom(UnityInput other) { + public void MergeFrom(UnityInputProto other) { if (other == null) { return; } if (other.rlInput_ != null) { if (rlInput_ == null) { - RlInput = new global::MLAgents.CommunicatorObjects.UnityRLInput(); + rlInput_ = new global::MLAgents.CommunicatorObjects.UnityRLInputProto(); } RlInput.MergeFrom(other.RlInput); } if (other.rlInitializationInput_ != null) { if (rlInitializationInput_ == null) { - RlInitializationInput = new global::MLAgents.CommunicatorObjects.UnityRLInitializationInput(); + rlInitializationInput_ = new global::MLAgents.CommunicatorObjects.UnityRLInitializationInputProto(); } RlInitializationInput.MergeFrom(other.RlInitializationInput); } @@ -194,16 +195,16 @@ public void MergeFrom(pb::CodedInputStream input) { break; case 10: { if (rlInput_ == null) { - RlInput = new 
global::MLAgents.CommunicatorObjects.UnityRLInput(); + rlInput_ = new global::MLAgents.CommunicatorObjects.UnityRLInputProto(); } - input.ReadMessage(RlInput); + input.ReadMessage(rlInput_); break; } case 18: { if (rlInitializationInput_ == null) { - RlInitializationInput = new global::MLAgents.CommunicatorObjects.UnityRLInitializationInput(); + rlInitializationInput_ = new global::MLAgents.CommunicatorObjects.UnityRLInitializationInputProto(); } - input.ReadMessage(RlInitializationInput); + input.ReadMessage(rlInitializationInput_); break; } } diff --git a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/UnityInput.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/UnityInput.cs.meta similarity index 83% rename from UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/UnityInput.cs.meta rename to UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/UnityInput.cs.meta index 846a8eb53a..32f1aa8334 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/UnityInput.cs.meta +++ b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/UnityInput.cs.meta @@ -1,5 +1,5 @@ fileFormatVersion: 2 -guid: 25e46cd9eca204e19a08fa938802ef9d +guid: 13de5026cc0834f558fe971eb93c850e MonoImporter: externalObjects: {} serializedVersion: 2 diff --git a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/UnityMessage.cs b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/UnityMessage.cs similarity index 73% rename from UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/UnityMessage.cs rename to UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/UnityMessage.cs index 11c6d4e89f..0b101b7d27 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/UnityMessage.cs +++ b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/UnityMessage.cs @@ -29,27 +29,27 @@ static UnityMessageReflection() { "bnZzL2NvbW11bmljYXRvcl9vYmplY3RzL3VuaXR5X291dHB1dC5wcm90bxo0", 
"bWxhZ2VudHMvZW52cy9jb21tdW5pY2F0b3Jfb2JqZWN0cy91bml0eV9pbnB1", "dC5wcm90bxovbWxhZ2VudHMvZW52cy9jb21tdW5pY2F0b3Jfb2JqZWN0cy9o", - "ZWFkZXIucHJvdG8irAEKDFVuaXR5TWVzc2FnZRIsCgZoZWFkZXIYASABKAsy", - "HC5jb21tdW5pY2F0b3Jfb2JqZWN0cy5IZWFkZXISNwoMdW5pdHlfb3V0cHV0", - "GAIgASgLMiEuY29tbXVuaWNhdG9yX29iamVjdHMuVW5pdHlPdXRwdXQSNQoL", - "dW5pdHlfaW5wdXQYAyABKAsyIC5jb21tdW5pY2F0b3Jfb2JqZWN0cy5Vbml0", - "eUlucHV0Qh+qAhxNTEFnZW50cy5Db21tdW5pY2F0b3JPYmplY3RzYgZwcm90", - "bzM=")); + "ZWFkZXIucHJvdG8iwAEKEVVuaXR5TWVzc2FnZVByb3RvEjEKBmhlYWRlchgB", + "IAEoCzIhLmNvbW11bmljYXRvcl9vYmplY3RzLkhlYWRlclByb3RvEjwKDHVu", + "aXR5X291dHB1dBgCIAEoCzImLmNvbW11bmljYXRvcl9vYmplY3RzLlVuaXR5", + "T3V0cHV0UHJvdG8SOgoLdW5pdHlfaW5wdXQYAyABKAsyJS5jb21tdW5pY2F0", + "b3Jfb2JqZWN0cy5Vbml0eUlucHV0UHJvdG9CH6oCHE1MQWdlbnRzLkNvbW11", + "bmljYXRvck9iamVjdHNiBnByb3RvMw==")); descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, new pbr::FileDescriptor[] { global::MLAgents.CommunicatorObjects.UnityOutputReflection.Descriptor, global::MLAgents.CommunicatorObjects.UnityInputReflection.Descriptor, global::MLAgents.CommunicatorObjects.HeaderReflection.Descriptor, }, new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] { - new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.UnityMessage), global::MLAgents.CommunicatorObjects.UnityMessage.Parser, new[]{ "Header", "UnityOutput", "UnityInput" }, null, null, null) + new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.UnityMessageProto), global::MLAgents.CommunicatorObjects.UnityMessageProto.Parser, new[]{ "Header", "UnityOutput", "UnityInput" }, null, null, null) })); } #endregion } #region Messages - public sealed partial class UnityMessage : pb::IMessage { - private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new UnityMessage()); + public sealed partial class UnityMessageProto : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() 
=> new UnityMessageProto()); private pb::UnknownFieldSet _unknownFields; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public static pb::MessageParser Parser { get { return _parser; } } + public static pb::MessageParser Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pbr::MessageDescriptor Descriptor { @@ -62,30 +62,30 @@ public sealed partial class UnityMessage : pb::IMessage { } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public UnityMessage() { + public UnityMessageProto() { OnConstruction(); } partial void OnConstruction(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public UnityMessage(UnityMessage other) : this() { - header_ = other.header_ != null ? other.header_.Clone() : null; - unityOutput_ = other.unityOutput_ != null ? other.unityOutput_.Clone() : null; - unityInput_ = other.unityInput_ != null ? other.unityInput_.Clone() : null; + public UnityMessageProto(UnityMessageProto other) : this() { + Header = other.header_ != null ? other.Header.Clone() : null; + UnityOutput = other.unityOutput_ != null ? other.UnityOutput.Clone() : null; + UnityInput = other.unityInput_ != null ? other.UnityInput.Clone() : null; _unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public UnityMessage Clone() { - return new UnityMessage(this); + public UnityMessageProto Clone() { + return new UnityMessageProto(this); } /// Field number for the "header" field. 
public const int HeaderFieldNumber = 1; - private global::MLAgents.CommunicatorObjects.Header header_; + private global::MLAgents.CommunicatorObjects.HeaderProto header_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public global::MLAgents.CommunicatorObjects.Header Header { + public global::MLAgents.CommunicatorObjects.HeaderProto Header { get { return header_; } set { header_ = value; @@ -94,9 +94,9 @@ public UnityMessage Clone() { /// Field number for the "unity_output" field. public const int UnityOutputFieldNumber = 2; - private global::MLAgents.CommunicatorObjects.UnityOutput unityOutput_; + private global::MLAgents.CommunicatorObjects.UnityOutputProto unityOutput_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public global::MLAgents.CommunicatorObjects.UnityOutput UnityOutput { + public global::MLAgents.CommunicatorObjects.UnityOutputProto UnityOutput { get { return unityOutput_; } set { unityOutput_ = value; @@ -105,9 +105,9 @@ public UnityMessage Clone() { /// Field number for the "unity_input" field. 
public const int UnityInputFieldNumber = 3; - private global::MLAgents.CommunicatorObjects.UnityInput unityInput_; + private global::MLAgents.CommunicatorObjects.UnityInputProto unityInput_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public global::MLAgents.CommunicatorObjects.UnityInput UnityInput { + public global::MLAgents.CommunicatorObjects.UnityInputProto UnityInput { get { return unityInput_; } set { unityInput_ = value; @@ -116,11 +116,11 @@ public UnityMessage Clone() { [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override bool Equals(object other) { - return Equals(other as UnityMessage); + return Equals(other as UnityMessageProto); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public bool Equals(UnityMessage other) { + public bool Equals(UnityMessageProto other) { if (ReferenceEquals(other, null)) { return false; } @@ -188,25 +188,25 @@ public int CalculateSize() { } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public void MergeFrom(UnityMessage other) { + public void MergeFrom(UnityMessageProto other) { if (other == null) { return; } if (other.header_ != null) { if (header_ == null) { - Header = new global::MLAgents.CommunicatorObjects.Header(); + header_ = new global::MLAgents.CommunicatorObjects.HeaderProto(); } Header.MergeFrom(other.Header); } if (other.unityOutput_ != null) { if (unityOutput_ == null) { - UnityOutput = new global::MLAgents.CommunicatorObjects.UnityOutput(); + unityOutput_ = new global::MLAgents.CommunicatorObjects.UnityOutputProto(); } UnityOutput.MergeFrom(other.UnityOutput); } if (other.unityInput_ != null) { if (unityInput_ == null) { - UnityInput = new global::MLAgents.CommunicatorObjects.UnityInput(); + unityInput_ = new global::MLAgents.CommunicatorObjects.UnityInputProto(); } UnityInput.MergeFrom(other.UnityInput); } @@ -223,23 +223,23 @@ public void MergeFrom(pb::CodedInputStream input) { break; case 10: { if (header_ == null) { - Header = new 
global::MLAgents.CommunicatorObjects.Header(); + header_ = new global::MLAgents.CommunicatorObjects.HeaderProto(); } - input.ReadMessage(Header); + input.ReadMessage(header_); break; } case 18: { if (unityOutput_ == null) { - UnityOutput = new global::MLAgents.CommunicatorObjects.UnityOutput(); + unityOutput_ = new global::MLAgents.CommunicatorObjects.UnityOutputProto(); } - input.ReadMessage(UnityOutput); + input.ReadMessage(unityOutput_); break; } case 26: { if (unityInput_ == null) { - UnityInput = new global::MLAgents.CommunicatorObjects.UnityInput(); + unityInput_ = new global::MLAgents.CommunicatorObjects.UnityInputProto(); } - input.ReadMessage(UnityInput); + input.ReadMessage(unityInput_); break; } } diff --git a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/UnityMessage.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/UnityMessage.cs.meta similarity index 83% rename from UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/UnityMessage.cs.meta rename to UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/UnityMessage.cs.meta index 6df8c3974f..fe03de4e4f 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/UnityMessage.cs.meta +++ b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/UnityMessage.cs.meta @@ -1,5 +1,5 @@ fileFormatVersion: 2 -guid: d270bf9ce3d564bb48b2095802c15ff9 +guid: e2189c32296994576b0ef0aaa2b78142 MonoImporter: externalObjects: {} serializedVersion: 2 diff --git a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/UnityOutput.cs b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/UnityOutput.cs similarity index 75% rename from UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/UnityOutput.cs rename to UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/UnityOutput.cs index dca5823934..c2529a79e8 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/UnityOutput.cs +++ b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/UnityOutput.cs @@ 
-28,27 +28,27 @@ static UnityOutputReflection() { "dHB1dC5wcm90bxIUY29tbXVuaWNhdG9yX29iamVjdHMaOG1sYWdlbnRzL2Vu", "dnMvY29tbXVuaWNhdG9yX29iamVjdHMvdW5pdHlfcmxfb3V0cHV0LnByb3Rv", "GkdtbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL3VuaXR5X3Js", - "X2luaXRpYWxpemF0aW9uX291dHB1dC5wcm90byKaAQoLVW5pdHlPdXRwdXQS", - "NgoJcmxfb3V0cHV0GAEgASgLMiMuY29tbXVuaWNhdG9yX29iamVjdHMuVW5p", - "dHlSTE91dHB1dBJTChhybF9pbml0aWFsaXphdGlvbl9vdXRwdXQYAiABKAsy", - "MS5jb21tdW5pY2F0b3Jfb2JqZWN0cy5Vbml0eVJMSW5pdGlhbGl6YXRpb25P", - "dXRwdXRCH6oCHE1MQWdlbnRzLkNvbW11bmljYXRvck9iamVjdHNiBnByb3Rv", - "Mw==")); + "X2luaXRpYWxpemF0aW9uX291dHB1dC5wcm90byKpAQoQVW5pdHlPdXRwdXRQ", + "cm90bxI7CglybF9vdXRwdXQYASABKAsyKC5jb21tdW5pY2F0b3Jfb2JqZWN0", + "cy5Vbml0eVJMT3V0cHV0UHJvdG8SWAoYcmxfaW5pdGlhbGl6YXRpb25fb3V0", + "cHV0GAIgASgLMjYuY29tbXVuaWNhdG9yX29iamVjdHMuVW5pdHlSTEluaXRp", + "YWxpemF0aW9uT3V0cHV0UHJvdG9CH6oCHE1MQWdlbnRzLkNvbW11bmljYXRv", + "ck9iamVjdHNiBnByb3RvMw==")); descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, new pbr::FileDescriptor[] { global::MLAgents.CommunicatorObjects.UnityRlOutputReflection.Descriptor, global::MLAgents.CommunicatorObjects.UnityRlInitializationOutputReflection.Descriptor, }, new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] { - new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.UnityOutput), global::MLAgents.CommunicatorObjects.UnityOutput.Parser, new[]{ "RlOutput", "RlInitializationOutput" }, null, null, null) + new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.UnityOutputProto), global::MLAgents.CommunicatorObjects.UnityOutputProto.Parser, new[]{ "RlOutput", "RlInitializationOutput" }, null, null, null) })); } #endregion } #region Messages - public sealed partial class UnityOutput : pb::IMessage { - private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new UnityOutput()); + public sealed partial class UnityOutputProto : pb::IMessage { + private static readonly 
pb::MessageParser _parser = new pb::MessageParser(() => new UnityOutputProto()); private pb::UnknownFieldSet _unknownFields; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public static pb::MessageParser Parser { get { return _parser; } } + public static pb::MessageParser Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pbr::MessageDescriptor Descriptor { @@ -61,29 +61,29 @@ public sealed partial class UnityOutput : pb::IMessage { } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public UnityOutput() { + public UnityOutputProto() { OnConstruction(); } partial void OnConstruction(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public UnityOutput(UnityOutput other) : this() { - rlOutput_ = other.rlOutput_ != null ? other.rlOutput_.Clone() : null; - rlInitializationOutput_ = other.rlInitializationOutput_ != null ? other.rlInitializationOutput_.Clone() : null; + public UnityOutputProto(UnityOutputProto other) : this() { + RlOutput = other.rlOutput_ != null ? other.RlOutput.Clone() : null; + RlInitializationOutput = other.rlInitializationOutput_ != null ? other.RlInitializationOutput.Clone() : null; _unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public UnityOutput Clone() { - return new UnityOutput(this); + public UnityOutputProto Clone() { + return new UnityOutputProto(this); } /// Field number for the "rl_output" field. 
public const int RlOutputFieldNumber = 1; - private global::MLAgents.CommunicatorObjects.UnityRLOutput rlOutput_; + private global::MLAgents.CommunicatorObjects.UnityRLOutputProto rlOutput_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public global::MLAgents.CommunicatorObjects.UnityRLOutput RlOutput { + public global::MLAgents.CommunicatorObjects.UnityRLOutputProto RlOutput { get { return rlOutput_; } set { rlOutput_ = value; @@ -92,9 +92,9 @@ public UnityOutput Clone() { /// Field number for the "rl_initialization_output" field. public const int RlInitializationOutputFieldNumber = 2; - private global::MLAgents.CommunicatorObjects.UnityRLInitializationOutput rlInitializationOutput_; + private global::MLAgents.CommunicatorObjects.UnityRLInitializationOutputProto rlInitializationOutput_; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public global::MLAgents.CommunicatorObjects.UnityRLInitializationOutput RlInitializationOutput { + public global::MLAgents.CommunicatorObjects.UnityRLInitializationOutputProto RlInitializationOutput { get { return rlInitializationOutput_; } set { rlInitializationOutput_ = value; @@ -103,11 +103,11 @@ public UnityOutput Clone() { [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override bool Equals(object other) { - return Equals(other as UnityOutput); + return Equals(other as UnityOutputProto); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public bool Equals(UnityOutput other) { + public bool Equals(UnityOutputProto other) { if (ReferenceEquals(other, null)) { return false; } @@ -166,19 +166,19 @@ public int CalculateSize() { } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public void MergeFrom(UnityOutput other) { + public void MergeFrom(UnityOutputProto other) { if (other == null) { return; } if (other.rlOutput_ != null) { if (rlOutput_ == null) { - RlOutput = new global::MLAgents.CommunicatorObjects.UnityRLOutput(); + rlOutput_ = new 
global::MLAgents.CommunicatorObjects.UnityRLOutputProto(); } RlOutput.MergeFrom(other.RlOutput); } if (other.rlInitializationOutput_ != null) { if (rlInitializationOutput_ == null) { - RlInitializationOutput = new global::MLAgents.CommunicatorObjects.UnityRLInitializationOutput(); + rlInitializationOutput_ = new global::MLAgents.CommunicatorObjects.UnityRLInitializationOutputProto(); } RlInitializationOutput.MergeFrom(other.RlInitializationOutput); } @@ -195,16 +195,16 @@ public void MergeFrom(pb::CodedInputStream input) { break; case 10: { if (rlOutput_ == null) { - RlOutput = new global::MLAgents.CommunicatorObjects.UnityRLOutput(); + rlOutput_ = new global::MLAgents.CommunicatorObjects.UnityRLOutputProto(); } - input.ReadMessage(RlOutput); + input.ReadMessage(rlOutput_); break; } case 18: { if (rlInitializationOutput_ == null) { - RlInitializationOutput = new global::MLAgents.CommunicatorObjects.UnityRLInitializationOutput(); + rlInitializationOutput_ = new global::MLAgents.CommunicatorObjects.UnityRLInitializationOutputProto(); } - input.ReadMessage(RlInitializationOutput); + input.ReadMessage(rlInitializationOutput_); break; } } diff --git a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/UnityOutput.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/UnityOutput.cs.meta similarity index 83% rename from UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/UnityOutput.cs.meta rename to UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/UnityOutput.cs.meta index 256098d3b1..e1ae734459 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/UnityOutput.cs.meta +++ b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/UnityOutput.cs.meta @@ -1,5 +1,5 @@ fileFormatVersion: 2 -guid: 5b7166f97831f45ef86df5eed0042240 +guid: e1c19e75c7657497fbc05cfa40dd6783 MonoImporter: externalObjects: {} serializedVersion: 2 diff --git a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/UnityRlInitializationInput.cs 
b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/UnityRlInitializationInput.cs similarity index 81% rename from UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/UnityRlInitializationInput.cs rename to UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/UnityRlInitializationInput.cs index af1f18a1ea..f9245fd4d3 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/UnityRlInitializationInput.cs +++ b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/UnityRlInitializationInput.cs @@ -26,23 +26,24 @@ static UnityRlInitializationInputReflection() { string.Concat( "CkZtbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL3VuaXR5X3Js", "X2luaXRpYWxpemF0aW9uX2lucHV0LnByb3RvEhRjb21tdW5pY2F0b3Jfb2Jq", - "ZWN0cyIqChpVbml0eVJMSW5pdGlhbGl6YXRpb25JbnB1dBIMCgRzZWVkGAEg", - "ASgFQh+qAhxNTEFnZW50cy5Db21tdW5pY2F0b3JPYmplY3RzYgZwcm90bzM=")); + "ZWN0cyIvCh9Vbml0eVJMSW5pdGlhbGl6YXRpb25JbnB1dFByb3RvEgwKBHNl", + "ZWQYASABKAVCH6oCHE1MQWdlbnRzLkNvbW11bmljYXRvck9iamVjdHNiBnBy", + "b3RvMw==")); descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, new pbr::FileDescriptor[] { }, new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] { - new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.UnityRLInitializationInput), global::MLAgents.CommunicatorObjects.UnityRLInitializationInput.Parser, new[]{ "Seed" }, null, null, null) + new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.UnityRLInitializationInputProto), global::MLAgents.CommunicatorObjects.UnityRLInitializationInputProto.Parser, new[]{ "Seed" }, null, null, null) })); } #endregion } #region Messages - public sealed partial class UnityRLInitializationInput : pb::IMessage { - private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new UnityRLInitializationInput()); + public sealed partial class UnityRLInitializationInputProto : pb::IMessage { + private static readonly pb::MessageParser _parser = new 
pb::MessageParser(() => new UnityRLInitializationInputProto()); private pb::UnknownFieldSet _unknownFields; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public static pb::MessageParser Parser { get { return _parser; } } + public static pb::MessageParser Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pbr::MessageDescriptor Descriptor { @@ -55,21 +56,21 @@ public sealed partial class UnityRLInitializationInput : pb::IMessageField number for the "seed" field.
@@ -85,11 +86,11 @@ public int Seed { [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override bool Equals(object other) { - return Equals(other as UnityRLInitializationInput); + return Equals(other as UnityRLInitializationInputProto); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public bool Equals(UnityRLInitializationInput other) { + public bool Equals(UnityRLInitializationInputProto other) { if (ReferenceEquals(other, null)) { return false; } @@ -139,7 +140,7 @@ public int CalculateSize() { } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public void MergeFrom(UnityRLInitializationInput other) { + public void MergeFrom(UnityRLInitializationInputProto other) { if (other == null) { return; } diff --git a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/UnityRlInitializationInput.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/UnityRlInitializationInput.cs.meta similarity index 83% rename from UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/UnityRlInitializationInput.cs.meta rename to UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/UnityRlInitializationInput.cs.meta index eb0f1e1c26..c0c9fc2fd4 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/UnityRlInitializationInput.cs.meta +++ b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/UnityRlInitializationInput.cs.meta @@ -1,5 +1,5 @@ fileFormatVersion: 2 -guid: 6c81750abd5a9432babe1834534122c0 +guid: e1542ad34ffb34317b74b239135d0477 MonoImporter: externalObjects: {} serializedVersion: 2 diff --git a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/UnityRlInitializationOutput.cs b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/UnityRlInitializationOutput.cs similarity index 81% rename from UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/UnityRlInitializationOutput.cs rename to UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/UnityRlInitializationOutput.cs index 
cc5799f48f..ae0fc7ce7b 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/UnityRlInitializationOutput.cs +++ b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/UnityRlInitializationOutput.cs @@ -26,20 +26,20 @@ static UnityRlInitializationOutputReflection() { string.Concat( "CkdtbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL3VuaXR5X3Js", "X2luaXRpYWxpemF0aW9uX291dHB1dC5wcm90bxIUY29tbXVuaWNhdG9yX29i", - "amVjdHMaP21sYWdlbnRzL2VudnMvY29tbXVuaWNhdG9yX29iamVjdHMvYnJh", - "aW5fcGFyYW1ldGVyc19wcm90by5wcm90bxpFbWxhZ2VudHMvZW52cy9jb21t", - "dW5pY2F0b3Jfb2JqZWN0cy9lbnZpcm9ubWVudF9wYXJhbWV0ZXJzX3Byb3Rv", - "LnByb3RvIuYBChtVbml0eVJMSW5pdGlhbGl6YXRpb25PdXRwdXQSDAoEbmFt", - "ZRgBIAEoCRIPCgd2ZXJzaW9uGAIgASgJEhAKCGxvZ19wYXRoGAMgASgJEkQK", - "EGJyYWluX3BhcmFtZXRlcnMYBSADKAsyKi5jb21tdW5pY2F0b3Jfb2JqZWN0", - "cy5CcmFpblBhcmFtZXRlcnNQcm90bxJQChZlbnZpcm9ubWVudF9wYXJhbWV0", - "ZXJzGAYgASgLMjAuY29tbXVuaWNhdG9yX29iamVjdHMuRW52aXJvbm1lbnRQ", - "YXJhbWV0ZXJzUHJvdG9CH6oCHE1MQWdlbnRzLkNvbW11bmljYXRvck9iamVj", - "dHNiBnByb3RvMw==")); + "amVjdHMaOW1sYWdlbnRzL2VudnMvY29tbXVuaWNhdG9yX29iamVjdHMvYnJh", + "aW5fcGFyYW1ldGVycy5wcm90bxo/bWxhZ2VudHMvZW52cy9jb21tdW5pY2F0", + "b3Jfb2JqZWN0cy9lbnZpcm9ubWVudF9wYXJhbWV0ZXJzLnByb3RvIusBCiBV", + "bml0eVJMSW5pdGlhbGl6YXRpb25PdXRwdXRQcm90bxIMCgRuYW1lGAEgASgJ", + "Eg8KB3ZlcnNpb24YAiABKAkSEAoIbG9nX3BhdGgYAyABKAkSRAoQYnJhaW5f", + "cGFyYW1ldGVycxgFIAMoCzIqLmNvbW11bmljYXRvcl9vYmplY3RzLkJyYWlu", + "UGFyYW1ldGVyc1Byb3RvElAKFmVudmlyb25tZW50X3BhcmFtZXRlcnMYBiAB", + "KAsyMC5jb21tdW5pY2F0b3Jfb2JqZWN0cy5FbnZpcm9ubWVudFBhcmFtZXRl", + "cnNQcm90b0IfqgIcTUxBZ2VudHMuQ29tbXVuaWNhdG9yT2JqZWN0c2IGcHJv", + "dG8z")); descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, - new pbr::FileDescriptor[] { global::MLAgents.CommunicatorObjects.BrainParametersProtoReflection.Descriptor, global::MLAgents.CommunicatorObjects.EnvironmentParametersProtoReflection.Descriptor, }, + new pbr::FileDescriptor[] { 
global::MLAgents.CommunicatorObjects.BrainParametersReflection.Descriptor, global::MLAgents.CommunicatorObjects.EnvironmentParametersReflection.Descriptor, }, new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] { - new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.UnityRLInitializationOutput), global::MLAgents.CommunicatorObjects.UnityRLInitializationOutput.Parser, new[]{ "Name", "Version", "LogPath", "BrainParameters", "EnvironmentParameters" }, null, null, null) + new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.UnityRLInitializationOutputProto), global::MLAgents.CommunicatorObjects.UnityRLInitializationOutputProto.Parser, new[]{ "Name", "Version", "LogPath", "BrainParameters", "EnvironmentParameters" }, null, null, null) })); } #endregion @@ -49,11 +49,11 @@ static UnityRlInitializationOutputReflection() { /// /// The request message containing the academy's parameters. /// - public sealed partial class UnityRLInitializationOutput : pb::IMessage { - private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new UnityRLInitializationOutput()); + public sealed partial class UnityRLInitializationOutputProto : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new UnityRLInitializationOutputProto()); private pb::UnknownFieldSet _unknownFields; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public static pb::MessageParser Parser { get { return _parser; } } + public static pb::MessageParser Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pbr::MessageDescriptor Descriptor { @@ -66,25 +66,25 @@ public sealed partial class UnityRLInitializationOutput : pb::IMessageField number for the "name" field.
@@ -143,11 +143,11 @@ public string LogPath { [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override bool Equals(object other) { - return Equals(other as UnityRLInitializationOutput); + return Equals(other as UnityRLInitializationOutputProto); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public bool Equals(UnityRLInitializationOutput other) { + public bool Equals(UnityRLInitializationOutputProto other) { if (ReferenceEquals(other, null)) { return false; } @@ -228,7 +228,7 @@ public int CalculateSize() { } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public void MergeFrom(UnityRLInitializationOutput other) { + public void MergeFrom(UnityRLInitializationOutputProto other) { if (other == null) { return; } @@ -244,7 +244,7 @@ public void MergeFrom(UnityRLInitializationOutput other) { brainParameters_.Add(other.brainParameters_); if (other.environmentParameters_ != null) { if (environmentParameters_ == null) { - EnvironmentParameters = new global::MLAgents.CommunicatorObjects.EnvironmentParametersProto(); + environmentParameters_ = new global::MLAgents.CommunicatorObjects.EnvironmentParametersProto(); } EnvironmentParameters.MergeFrom(other.EnvironmentParameters); } @@ -277,9 +277,9 @@ public void MergeFrom(pb::CodedInputStream input) { } case 50: { if (environmentParameters_ == null) { - EnvironmentParameters = new global::MLAgents.CommunicatorObjects.EnvironmentParametersProto(); + environmentParameters_ = new global::MLAgents.CommunicatorObjects.EnvironmentParametersProto(); } - input.ReadMessage(EnvironmentParameters); + input.ReadMessage(environmentParameters_); break; } } diff --git a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/UnityRlInitializationOutput.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/UnityRlInitializationOutput.cs.meta similarity index 83% rename from UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/UnityRlInitializationOutput.cs.meta rename to 
UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/UnityRlInitializationOutput.cs.meta index 1afe3779a9..bbc4dba7c4 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/UnityRlInitializationOutput.cs.meta +++ b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/UnityRlInitializationOutput.cs.meta @@ -1,5 +1,5 @@ fileFormatVersion: 2 -guid: f7ac9dd525a2246688054b2442eda28a +guid: e0bcb88495d5d48229140a2080dfd297 MonoImporter: externalObjects: {} serializedVersion: 2 diff --git a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/UnityRlInput.cs b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/UnityRlInput.cs similarity index 78% rename from UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/UnityRlInput.cs rename to UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/UnityRlInput.cs index 9aa168695c..2a9f416de7 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/UnityRlInput.cs +++ b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/UnityRlInput.cs @@ -25,26 +25,26 @@ static UnityRlInputReflection() { byte[] descriptorData = global::System.Convert.FromBase64String( string.Concat( "CjdtbGFnZW50cy9lbnZzL2NvbW11bmljYXRvcl9vYmplY3RzL3VuaXR5X3Js", - "X2lucHV0LnByb3RvEhRjb21tdW5pY2F0b3Jfb2JqZWN0cxo7bWxhZ2VudHMv", - "ZW52cy9jb21tdW5pY2F0b3Jfb2JqZWN0cy9hZ2VudF9hY3Rpb25fcHJvdG8u", - "cHJvdG8aRW1sYWdlbnRzL2VudnMvY29tbXVuaWNhdG9yX29iamVjdHMvZW52", - "aXJvbm1lbnRfcGFyYW1ldGVyc19wcm90by5wcm90bxo2bWxhZ2VudHMvZW52", - "cy9jb21tdW5pY2F0b3Jfb2JqZWN0cy9jb21tYW5kX3Byb3RvLnByb3RvIrQD", - "CgxVbml0eVJMSW5wdXQSSwoNYWdlbnRfYWN0aW9ucxgBIAMoCzI0LmNvbW11", - "bmljYXRvcl9vYmplY3RzLlVuaXR5UkxJbnB1dC5BZ2VudEFjdGlvbnNFbnRy", - "eRJQChZlbnZpcm9ubWVudF9wYXJhbWV0ZXJzGAIgASgLMjAuY29tbXVuaWNh", - "dG9yX29iamVjdHMuRW52aXJvbm1lbnRQYXJhbWV0ZXJzUHJvdG8SEwoLaXNf", - "dHJhaW5pbmcYAyABKAgSMwoHY29tbWFuZBgEIAEoDjIiLmNvbW11bmljYXRv", - "cl9vYmplY3RzLkNvbW1hbmRQcm90bxpNChRMaXN0QWdlbnRBY3Rpb25Qcm90", - 
"bxI1CgV2YWx1ZRgBIAMoCzImLmNvbW11bmljYXRvcl9vYmplY3RzLkFnZW50", - "QWN0aW9uUHJvdG8abAoRQWdlbnRBY3Rpb25zRW50cnkSCwoDa2V5GAEgASgJ", - "EkYKBXZhbHVlGAIgASgLMjcuY29tbXVuaWNhdG9yX29iamVjdHMuVW5pdHlS", - "TElucHV0Lkxpc3RBZ2VudEFjdGlvblByb3RvOgI4AUIfqgIcTUxBZ2VudHMu", - "Q29tbXVuaWNhdG9yT2JqZWN0c2IGcHJvdG8z")); + "X2lucHV0LnByb3RvEhRjb21tdW5pY2F0b3Jfb2JqZWN0cxo1bWxhZ2VudHMv", + "ZW52cy9jb21tdW5pY2F0b3Jfb2JqZWN0cy9hZ2VudF9hY3Rpb24ucHJvdG8a", + "P21sYWdlbnRzL2VudnMvY29tbXVuaWNhdG9yX29iamVjdHMvZW52aXJvbm1l", + "bnRfcGFyYW1ldGVycy5wcm90bxowbWxhZ2VudHMvZW52cy9jb21tdW5pY2F0", + "b3Jfb2JqZWN0cy9jb21tYW5kLnByb3RvIsMDChFVbml0eVJMSW5wdXRQcm90", + "bxJQCg1hZ2VudF9hY3Rpb25zGAEgAygLMjkuY29tbXVuaWNhdG9yX29iamVj", + "dHMuVW5pdHlSTElucHV0UHJvdG8uQWdlbnRBY3Rpb25zRW50cnkSUAoWZW52", + "aXJvbm1lbnRfcGFyYW1ldGVycxgCIAEoCzIwLmNvbW11bmljYXRvcl9vYmpl", + "Y3RzLkVudmlyb25tZW50UGFyYW1ldGVyc1Byb3RvEhMKC2lzX3RyYWluaW5n", + "GAMgASgIEjMKB2NvbW1hbmQYBCABKA4yIi5jb21tdW5pY2F0b3Jfb2JqZWN0", + "cy5Db21tYW5kUHJvdG8aTQoUTGlzdEFnZW50QWN0aW9uUHJvdG8SNQoFdmFs", + "dWUYASADKAsyJi5jb21tdW5pY2F0b3Jfb2JqZWN0cy5BZ2VudEFjdGlvblBy", + "b3RvGnEKEUFnZW50QWN0aW9uc0VudHJ5EgsKA2tleRgBIAEoCRJLCgV2YWx1", + "ZRgCIAEoCzI8LmNvbW11bmljYXRvcl9vYmplY3RzLlVuaXR5UkxJbnB1dFBy", + "b3RvLkxpc3RBZ2VudEFjdGlvblByb3RvOgI4AUIfqgIcTUxBZ2VudHMuQ29t", + "bXVuaWNhdG9yT2JqZWN0c2IGcHJvdG8z")); descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, - new pbr::FileDescriptor[] { global::MLAgents.CommunicatorObjects.AgentActionProtoReflection.Descriptor, global::MLAgents.CommunicatorObjects.EnvironmentParametersProtoReflection.Descriptor, global::MLAgents.CommunicatorObjects.CommandProtoReflection.Descriptor, }, + new pbr::FileDescriptor[] { global::MLAgents.CommunicatorObjects.AgentActionReflection.Descriptor, global::MLAgents.CommunicatorObjects.EnvironmentParametersReflection.Descriptor, global::MLAgents.CommunicatorObjects.CommandReflection.Descriptor, }, new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] { - new 
pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.UnityRLInput), global::MLAgents.CommunicatorObjects.UnityRLInput.Parser, new[]{ "AgentActions", "EnvironmentParameters", "IsTraining", "Command" }, null, null, new pbr::GeneratedClrTypeInfo[] { new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.UnityRLInput.Types.ListAgentActionProto), global::MLAgents.CommunicatorObjects.UnityRLInput.Types.ListAgentActionProto.Parser, new[]{ "Value" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.UnityRLInputProto), global::MLAgents.CommunicatorObjects.UnityRLInputProto.Parser, new[]{ "AgentActions", "EnvironmentParameters", "IsTraining", "Command" }, null, null, new pbr::GeneratedClrTypeInfo[] { new pbr::GeneratedClrTypeInfo(typeof(global::MLAgents.CommunicatorObjects.UnityRLInputProto.Types.ListAgentActionProto), global::MLAgents.CommunicatorObjects.UnityRLInputProto.Types.ListAgentActionProto.Parser, new[]{ "Value" }, null, null, null), null, }) })); } @@ -52,11 +52,11 @@ static UnityRlInputReflection() { } #region Messages - public sealed partial class UnityRLInput : pb::IMessage { - private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new UnityRLInput()); + public sealed partial class UnityRLInputProto : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new UnityRLInputProto()); private pb::UnknownFieldSet _unknownFields; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public static pb::MessageParser Parser { get { return _parser; } } + public static pb::MessageParser Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pbr::MessageDescriptor Descriptor { @@ -69,33 +69,33 @@ public sealed partial class UnityRLInput : pb::IMessage { } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public UnityRLInput() { + public UnityRLInputProto() { 
OnConstruction(); } partial void OnConstruction(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public UnityRLInput(UnityRLInput other) : this() { + public UnityRLInputProto(UnityRLInputProto other) : this() { agentActions_ = other.agentActions_.Clone(); - environmentParameters_ = other.environmentParameters_ != null ? other.environmentParameters_.Clone() : null; + EnvironmentParameters = other.environmentParameters_ != null ? other.EnvironmentParameters.Clone() : null; isTraining_ = other.isTraining_; command_ = other.command_; _unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public UnityRLInput Clone() { - return new UnityRLInput(this); + public UnityRLInputProto Clone() { + return new UnityRLInputProto(this); } /// Field number for the "agent_actions" field. public const int AgentActionsFieldNumber = 1; - private static readonly pbc::MapField.Codec _map_agentActions_codec - = new pbc::MapField.Codec(pb::FieldCodec.ForString(10), pb::FieldCodec.ForMessage(18, global::MLAgents.CommunicatorObjects.UnityRLInput.Types.ListAgentActionProto.Parser), 10); - private readonly pbc::MapField agentActions_ = new pbc::MapField(); + private static readonly pbc::MapField.Codec _map_agentActions_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForString(10), pb::FieldCodec.ForMessage(18, global::MLAgents.CommunicatorObjects.UnityRLInputProto.Types.ListAgentActionProto.Parser), 10); + private readonly pbc::MapField agentActions_ = new pbc::MapField(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public pbc::MapField AgentActions { + public pbc::MapField AgentActions { get { return agentActions_; } } @@ -134,11 +134,11 @@ public bool IsTraining { [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override bool Equals(object other) { - return Equals(other as UnityRLInput); + return Equals(other as UnityRLInputProto); } 
[global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public bool Equals(UnityRLInput other) { + public bool Equals(UnityRLInputProto other) { if (ReferenceEquals(other, null)) { return false; } @@ -210,14 +210,14 @@ public int CalculateSize() { } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public void MergeFrom(UnityRLInput other) { + public void MergeFrom(UnityRLInputProto other) { if (other == null) { return; } agentActions_.Add(other.agentActions_); if (other.environmentParameters_ != null) { if (environmentParameters_ == null) { - EnvironmentParameters = new global::MLAgents.CommunicatorObjects.EnvironmentParametersProto(); + environmentParameters_ = new global::MLAgents.CommunicatorObjects.EnvironmentParametersProto(); } EnvironmentParameters.MergeFrom(other.EnvironmentParameters); } @@ -244,9 +244,9 @@ public void MergeFrom(pb::CodedInputStream input) { } case 18: { if (environmentParameters_ == null) { - EnvironmentParameters = new global::MLAgents.CommunicatorObjects.EnvironmentParametersProto(); + environmentParameters_ = new global::MLAgents.CommunicatorObjects.EnvironmentParametersProto(); } - input.ReadMessage(EnvironmentParameters); + input.ReadMessage(environmentParameters_); break; } case 24: { @@ -254,7 +254,7 @@ public void MergeFrom(pb::CodedInputStream input) { break; } case 32: { - Command = (global::MLAgents.CommunicatorObjects.CommandProto) input.ReadEnum(); + command_ = (global::MLAgents.CommunicatorObjects.CommandProto) input.ReadEnum(); break; } } @@ -262,7 +262,7 @@ public void MergeFrom(pb::CodedInputStream input) { } #region Nested types - /// Container for nested types declared in the UnityRLInput message type. + /// Container for nested types declared in the UnityRLInputProto message type. 
[global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static partial class Types { public sealed partial class ListAgentActionProto : pb::IMessage { @@ -273,7 +273,7 @@ public sealed partial class ListAgentActionProto : pb::IMessage { - private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new UnityRLOutput()); + public sealed partial class UnityRLOutputProto : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new UnityRLOutputProto()); private pb::UnknownFieldSet _unknownFields; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public static pb::MessageParser Parser { get { return _parser; } } + public static pb::MessageParser Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static pbr::MessageDescriptor Descriptor { @@ -63,59 +63,46 @@ public sealed partial class UnityRLOutput : pb::IMessage { } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public UnityRLOutput() { + public UnityRLOutputProto() { OnConstruction(); } partial void OnConstruction(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public UnityRLOutput(UnityRLOutput other) : this() { - globalDone_ = other.globalDone_; + public UnityRLOutputProto(UnityRLOutputProto other) : this() { agentInfos_ = other.agentInfos_.Clone(); _unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public UnityRLOutput Clone() { - return new UnityRLOutput(this); - } - - /// Field number for the "global_done" field. - public const int GlobalDoneFieldNumber = 1; - private bool globalDone_; - [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public bool GlobalDone { - get { return globalDone_; } - set { - globalDone_ = value; - } + public UnityRLOutputProto Clone() { + return new UnityRLOutputProto(this); } /// Field number for the "agentInfos" field. 
public const int AgentInfosFieldNumber = 2; - private static readonly pbc::MapField.Codec _map_agentInfos_codec - = new pbc::MapField.Codec(pb::FieldCodec.ForString(10), pb::FieldCodec.ForMessage(18, global::MLAgents.CommunicatorObjects.UnityRLOutput.Types.ListAgentInfoProto.Parser), 18); - private readonly pbc::MapField agentInfos_ = new pbc::MapField(); + private static readonly pbc::MapField.Codec _map_agentInfos_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForString(10), pb::FieldCodec.ForMessage(18, global::MLAgents.CommunicatorObjects.UnityRLOutputProto.Types.ListAgentInfoProto.Parser), 18); + private readonly pbc::MapField agentInfos_ = new pbc::MapField(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public pbc::MapField AgentInfos { + public pbc::MapField AgentInfos { get { return agentInfos_; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override bool Equals(object other) { - return Equals(other as UnityRLOutput); + return Equals(other as UnityRLOutputProto); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public bool Equals(UnityRLOutput other) { + public bool Equals(UnityRLOutputProto other) { if (ReferenceEquals(other, null)) { return false; } if (ReferenceEquals(other, this)) { return true; } - if (GlobalDone != other.GlobalDone) return false; if (!AgentInfos.Equals(other.AgentInfos)) return false; return Equals(_unknownFields, other._unknownFields); } @@ -123,7 +110,6 @@ public bool Equals(UnityRLOutput other) { [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public override int GetHashCode() { int hash = 1; - if (GlobalDone != false) hash ^= GlobalDone.GetHashCode(); hash ^= AgentInfos.GetHashCode(); if (_unknownFields != null) { hash ^= _unknownFields.GetHashCode(); @@ -138,10 +124,6 @@ public override string ToString() { [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public void WriteTo(pb::CodedOutputStream output) { - if (GlobalDone != false) { - output.WriteRawTag(8); 
- output.WriteBool(GlobalDone); - } agentInfos_.WriteTo(output, _map_agentInfos_codec); if (_unknownFields != null) { _unknownFields.WriteTo(output); @@ -151,9 +133,6 @@ public void WriteTo(pb::CodedOutputStream output) { [global::System.Diagnostics.DebuggerNonUserCodeAttribute] public int CalculateSize() { int size = 0; - if (GlobalDone != false) { - size += 1 + 1; - } size += agentInfos_.CalculateSize(_map_agentInfos_codec); if (_unknownFields != null) { size += _unknownFields.CalculateSize(); @@ -162,13 +141,10 @@ public int CalculateSize() { } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] - public void MergeFrom(UnityRLOutput other) { + public void MergeFrom(UnityRLOutputProto other) { if (other == null) { return; } - if (other.GlobalDone != false) { - GlobalDone = other.GlobalDone; - } agentInfos_.Add(other.agentInfos_); _unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields); } @@ -181,10 +157,6 @@ public void MergeFrom(pb::CodedInputStream input) { default: _unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input); break; - case 8: { - GlobalDone = input.ReadBool(); - break; - } case 18: { agentInfos_.AddEntriesFrom(input, _map_agentInfos_codec); break; @@ -194,7 +166,7 @@ public void MergeFrom(pb::CodedInputStream input) { } #region Nested types - /// Container for nested types declared in the UnityRLOutput message type. + /// Container for nested types declared in the UnityRLOutputProto message type. 
[global::System.Diagnostics.DebuggerNonUserCodeAttribute] public static partial class Types { public sealed partial class ListAgentInfoProto : pb::IMessage { @@ -205,7 +177,7 @@ public sealed partial class ListAgentInfoProto : pb::IMessage __Marshaller_communicator_objects_UnityMessage = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::MLAgents.CommunicatorObjects.UnityMessage.Parser.ParseFrom); + static readonly grpc::Marshaller __Marshaller_communicator_objects_UnityMessageProto = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::MLAgents.CommunicatorObjects.UnityMessageProto.Parser.ParseFrom); - static readonly grpc::Method __Method_Exchange = new grpc::Method( + static readonly grpc::Method __Method_Exchange = new grpc::Method( grpc::MethodType.Unary, __ServiceName, "Exchange", - __Marshaller_communicator_objects_UnityMessage, - __Marshaller_communicator_objects_UnityMessage); + __Marshaller_communicator_objects_UnityMessageProto, + __Marshaller_communicator_objects_UnityMessageProto); /// Service descriptor public static global::Google.Protobuf.Reflection.ServiceDescriptor Descriptor @@ -32,8 +28,8 @@ public static partial class UnityToExternal get { return global::MLAgents.CommunicatorObjects.UnityToExternalReflection.Descriptor.Services[0]; } } - /// Base class for server-side implementations of UnityToExternal - public abstract partial class UnityToExternalBase + /// Base class for server-side implementations of UnityToExternalProto + public abstract partial class UnityToExternalProtoBase { /// /// Sends the academy parameters @@ -41,33 +37,33 @@ public abstract partial class UnityToExternalBase /// The request received from the client. /// The context of the server-side call handler being invoked. /// The response to send back to the client (wrapped by a task). 
- public virtual global::System.Threading.Tasks.Task Exchange(global::MLAgents.CommunicatorObjects.UnityMessage request, grpc::ServerCallContext context) + public virtual global::System.Threading.Tasks.Task Exchange(global::MLAgents.CommunicatorObjects.UnityMessageProto request, grpc::ServerCallContext context) { throw new grpc::RpcException(new grpc::Status(grpc::StatusCode.Unimplemented, "")); } } - /// Client for UnityToExternal - public partial class UnityToExternalClient : grpc::ClientBase + /// Client for UnityToExternalProto + public partial class UnityToExternalProtoClient : grpc::ClientBase { - /// Creates a new client for UnityToExternal + /// Creates a new client for UnityToExternalProto /// The channel to use to make remote calls. - public UnityToExternalClient(grpc::Channel channel) : base(channel) + public UnityToExternalProtoClient(grpc::Channel channel) : base(channel) { } - /// Creates a new client for UnityToExternal that uses a custom CallInvoker. + /// Creates a new client for UnityToExternalProto that uses a custom CallInvoker. /// The callInvoker to use to make remote calls. - public UnityToExternalClient(grpc::CallInvoker callInvoker) : base(callInvoker) + public UnityToExternalProtoClient(grpc::CallInvoker callInvoker) : base(callInvoker) { } /// Protected parameterless constructor to allow creation of test doubles. - protected UnityToExternalClient() : base() + protected UnityToExternalProtoClient() : base() { } /// Protected constructor to allow creation of configured clients. /// The client configuration. - protected UnityToExternalClient(ClientBaseConfiguration configuration) : base(configuration) + protected UnityToExternalProtoClient(ClientBaseConfiguration configuration) : base(configuration) { } @@ -79,7 +75,7 @@ protected UnityToExternalClient(ClientBaseConfiguration configuration) : base(co /// An optional deadline for the call. The call will be cancelled if deadline is hit. /// An optional token for canceling the call. 
/// The response received from the server. - public virtual global::MLAgents.CommunicatorObjects.UnityMessage Exchange(global::MLAgents.CommunicatorObjects.UnityMessage request, grpc::Metadata headers = null, global::System.DateTime? deadline = null, global::System.Threading.CancellationToken cancellationToken = default(global::System.Threading.CancellationToken)) + public virtual global::MLAgents.CommunicatorObjects.UnityMessageProto Exchange(global::MLAgents.CommunicatorObjects.UnityMessageProto request, grpc::Metadata headers = null, global::System.DateTime? deadline = null, global::System.Threading.CancellationToken cancellationToken = default(global::System.Threading.CancellationToken)) { return Exchange(request, new grpc::CallOptions(headers, deadline, cancellationToken)); } @@ -89,7 +85,7 @@ protected UnityToExternalClient(ClientBaseConfiguration configuration) : base(co /// The request to send to the server. /// The options for the call. /// The response received from the server. - public virtual global::MLAgents.CommunicatorObjects.UnityMessage Exchange(global::MLAgents.CommunicatorObjects.UnityMessage request, grpc::CallOptions options) + public virtual global::MLAgents.CommunicatorObjects.UnityMessageProto Exchange(global::MLAgents.CommunicatorObjects.UnityMessageProto request, grpc::CallOptions options) { return CallInvoker.BlockingUnaryCall(__Method_Exchange, null, options, request); } @@ -101,7 +97,7 @@ protected UnityToExternalClient(ClientBaseConfiguration configuration) : base(co /// An optional deadline for the call. The call will be cancelled if deadline is hit. /// An optional token for canceling the call. /// The call object. - public virtual grpc::AsyncUnaryCall ExchangeAsync(global::MLAgents.CommunicatorObjects.UnityMessage request, grpc::Metadata headers = null, global::System.DateTime? 
deadline = null, global::System.Threading.CancellationToken cancellationToken = default(global::System.Threading.CancellationToken)) + public virtual grpc::AsyncUnaryCall ExchangeAsync(global::MLAgents.CommunicatorObjects.UnityMessageProto request, grpc::Metadata headers = null, global::System.DateTime? deadline = null, global::System.Threading.CancellationToken cancellationToken = default(global::System.Threading.CancellationToken)) { return ExchangeAsync(request, new grpc::CallOptions(headers, deadline, cancellationToken)); } @@ -111,20 +107,20 @@ protected UnityToExternalClient(ClientBaseConfiguration configuration) : base(co /// The request to send to the server. /// The options for the call. /// The call object. - public virtual grpc::AsyncUnaryCall ExchangeAsync(global::MLAgents.CommunicatorObjects.UnityMessage request, grpc::CallOptions options) + public virtual grpc::AsyncUnaryCall ExchangeAsync(global::MLAgents.CommunicatorObjects.UnityMessageProto request, grpc::CallOptions options) { return CallInvoker.AsyncUnaryCall(__Method_Exchange, null, options, request); } /// Creates a new instance of client from given ClientBaseConfiguration. - protected override UnityToExternalClient NewInstance(ClientBaseConfiguration configuration) + protected override UnityToExternalProtoClient NewInstance(ClientBaseConfiguration configuration) { - return new UnityToExternalClient(configuration); + return new UnityToExternalProtoClient(configuration); } } /// Creates service definition that can be registered with a server /// An object implementing the server-side handling logic. 
- public static grpc::ServerServiceDefinition BindService(UnityToExternalBase serviceImpl) + public static grpc::ServerServiceDefinition BindService(UnityToExternalProtoBase serviceImpl) { return grpc::ServerServiceDefinition.CreateBuilder() .AddMethod(__Method_Exchange, serviceImpl.Exchange).Build(); @@ -133,5 +129,4 @@ protected override UnityToExternalClient NewInstance(ClientBaseConfiguration con } } #endregion - -#endif \ No newline at end of file +#endif diff --git a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/UnityToExternalGrpc.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/UnityToExternalGrpc.cs.meta similarity index 83% rename from UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/UnityToExternalGrpc.cs.meta rename to UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/UnityToExternalGrpc.cs.meta index 3397b63763..620a3f1d44 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects/UnityToExternalGrpc.cs.meta +++ b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects/UnityToExternalGrpc.cs.meta @@ -1,5 +1,5 @@ fileFormatVersion: 2 -guid: 0378b2871a6c649f69c2f32d5c0fb045 +guid: 6c0f560328e7343499ad203c75c11741 MonoImporter: externalObjects: {} serializedVersion: 2 diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Grpc/GrpcExtensions.cs b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/GrpcExtensions.cs new file mode 100644 index 0000000000..cc6cf9534c --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/GrpcExtensions.cs @@ -0,0 +1,194 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using Google.Protobuf; +using Google.Protobuf.Collections; +using MLAgents.CommunicatorObjects; +using MLAgents.Sensor; +using UnityEngine; + +namespace MLAgents +{ + public static class GrpcExtensions + { + /// + /// Converts a AgentInfo to a protobuf generated AgentInfoProto + /// + /// The protobuf version of the AgentInfo. 
+ public static AgentInfoProto ToProto(this AgentInfo ai) + { + var agentInfoProto = new AgentInfoProto + { + StackedVectorObservation = { ai.stackedVectorObservation }, + StoredVectorActions = { ai.storedVectorActions }, + StoredTextActions = ai.storedTextActions, + TextObservation = ai.textObservation, + Reward = ai.reward, + MaxStepReached = ai.maxStepReached, + Done = ai.done, + Id = ai.id, + CustomObservation = ai.customObservation + }; + if (ai.memories != null) + { + agentInfoProto.Memories.Add(ai.memories); + } + + if (ai.actionMasks != null) + { + agentInfoProto.ActionMask.AddRange(ai.actionMasks); + } + + if (ai.compressedObservations != null) + { + foreach (var obs in ai.compressedObservations) + { + agentInfoProto.CompressedObservations.Add(obs.ToProto()); + } + } + + return agentInfoProto; + } + + /// + /// Converts a Brain into to a Protobuf BrainInfoProto so it can be sent + /// + /// The BrainInfoProto generated. + /// The instance of BrainParameter to extend. + /// The name of the brain. + /// Whether or not the Brain is training. + public static BrainParametersProto ToProto(this BrainParameters bp, string name, bool isTraining) + { + var brainParametersProto = new BrainParametersProto + { + VectorObservationSize = bp.vectorObservationSize, + NumStackedVectorObservations = bp.numStackedVectorObservations, + VectorActionSize = { bp.vectorActionSize }, + VectorActionSpaceType = + (SpaceTypeProto)bp.vectorActionSpaceType, + BrainName = name, + IsTraining = isTraining + }; + brainParametersProto.VectorActionDescriptions.AddRange(bp.vectorActionDescriptions); + return brainParametersProto; + } + + /// + /// Convert metadata object to proto object. 
+ /// + public static DemonstrationMetaProto ToProto(this DemonstrationMetaData dm) + { + var demoProto = new DemonstrationMetaProto + { + ApiVersion = DemonstrationMetaData.ApiVersion, + MeanReward = dm.meanReward, + NumberSteps = dm.numberExperiences, + NumberEpisodes = dm.numberEpisodes, + DemonstrationName = dm.demonstrationName + }; + return demoProto; + } + + /// + /// Initialize metadata values based on proto object. + /// + public static DemonstrationMetaData ToDemonstrationMetaData(this DemonstrationMetaProto demoProto) + { + var dm = new DemonstrationMetaData + { + numberEpisodes = demoProto.NumberEpisodes, + numberExperiences = demoProto.NumberSteps, + meanReward = demoProto.MeanReward, + demonstrationName = demoProto.DemonstrationName + }; + if (demoProto.ApiVersion != DemonstrationMetaData.ApiVersion) + { + throw new Exception("API versions of demonstration are incompatible."); + } + return dm; + } + + /// + /// Convert a BrainParametersProto to a BrainParameters struct. + /// + /// An instance of a brain parameters protobuf object. + /// A BrainParameters struct. + public static BrainParameters ToBrainParameters(this BrainParametersProto bpp) + { + var bp = new BrainParameters + { + vectorObservationSize = bpp.VectorObservationSize, + numStackedVectorObservations = bpp.NumStackedVectorObservations, + vectorActionSize = bpp.VectorActionSize.ToArray(), + vectorActionDescriptions = bpp.VectorActionDescriptions.ToArray(), + vectorActionSpaceType = (SpaceType)bpp.VectorActionSpaceType + }; + return bp; + } + + /// + /// Convert a MapField to ResetParameters. + /// + /// The mapping of strings to floats from a protobuf MapField. + /// + public static ResetParameters ToResetParameters(this MapField floatParams) + { + return new ResetParameters(floatParams); + } + + /// + /// Convert an EnvironmnetParametersProto protobuf object to an EnvironmentResetParameters struct. + /// + /// The instance of the EnvironmentParametersProto object. 
+ /// A new EnvironmentResetParameters struct. + public static EnvironmentResetParameters ToEnvironmentResetParameters(this EnvironmentParametersProto epp) + { + return new EnvironmentResetParameters + { + resetParameters = epp.FloatParameters?.ToResetParameters(), + customResetParameters = epp.CustomResetParameters + }; + } + + public static UnityRLInitParameters ToUnityRLInitParameters(this UnityRLInitializationInputProto inputProto) + { + return new UnityRLInitParameters + { + seed = inputProto.Seed + }; + } + + public static AgentAction ToAgentAction(this AgentActionProto aap) + { + return new AgentAction + { + vectorActions = aap.VectorActions.ToArray(), + textActions = aap.TextActions, + memories = aap.Memories.ToList(), + value = aap.Value, + customAction = aap.CustomAction + }; + } + + public static List ToAgentActionList(this UnityRLInputProto.Types.ListAgentActionProto proto) + { + var agentActions = new List(proto.Value.Count); + foreach (var ap in proto.Value) + { + agentActions.Add(ap.ToAgentAction()); + } + return agentActions; + } + + public static CompressedObservationProto ToProto(this CompressedObservation obs) + { + var obsProto = new CompressedObservationProto + { + Data = ByteString.CopyFrom(obs.Data), + CompressionType = (CompressionTypeProto) obs.CompressionType, + }; + obsProto.Shape.AddRange(obs.Shape); + return obsProto; + } + } +} diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Grpc/GrpcExtensions.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/GrpcExtensions.cs.meta new file mode 100644 index 0000000000..31c109f8fa --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/GrpcExtensions.cs.meta @@ -0,0 +1,3 @@ +fileFormatVersion: 2 +guid: 02e8742d8a124607bef3b5ff8b9dd3d0 +timeCreated: 1569444771 \ No newline at end of file diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Grpc/RpcCommunicator.cs b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/RpcCommunicator.cs new file mode 100644 index 0000000000..0beddc8356 --- /dev/null +++ 
b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/RpcCommunicator.cs @@ -0,0 +1,467 @@ +# if UNITY_EDITOR || UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX || UNITY_STANDALONE_LINUX +using Grpc.Core; +#endif +#if UNITY_EDITOR +using UnityEditor; +#endif +using System; +using System.Collections.Generic; +using System.Linq; +using UnityEngine; +using MLAgents.CommunicatorObjects; + +namespace MLAgents +{ + /// Responsible for communication with External using gRPC. + public class RpcCommunicator : ICommunicator + { + public event QuitCommandHandler QuitCommandReceived; + public event ResetCommandHandler ResetCommandReceived; + public event RLInputReceivedHandler RLInputReceived; + + /// If true, the communication is active. + bool m_IsOpen; + + /// The default number of agents in the scene + private const int k_NumAgents = 32; + + /// Keeps track of the agents of each brain on the current step + Dictionary> m_CurrentAgents = + new Dictionary>(); + + /// The current UnityRLOutput to be sent when all the brains queried the communicator + UnityRLOutputProto m_CurrentUnityRlOutput = + new UnityRLOutputProto(); + + Dictionary> m_LastActionsReceived = + new Dictionary>(); + + // Brains that we have sent over the communicator with agents. + HashSet m_sentBrainKeys = new HashSet(); + Dictionary m_unsentBrainKeys = new Dictionary(); + + +# if UNITY_EDITOR || UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX || UNITY_STANDALONE_LINUX + /// The Unity to External client. + UnityToExternalProto.UnityToExternalProtoClient m_Client; +#endif + /// The communicator parameters sent at construction + CommunicatorInitParameters m_CommunicatorInitParameters; + + /// + /// Initializes a new instance of the RPCCommunicator class. + /// + /// Communicator parameters. 
+ public RpcCommunicator(CommunicatorInitParameters communicatorInitParameters) + { + m_CommunicatorInitParameters = communicatorInitParameters; + } + + #region Initialization + + /// + /// Sends the initialization parameters through the Communicator. + /// Is used by the academy to send initialization parameters to the communicator. + /// + /// The External Initialization Parameters received. + /// The Unity Initialization Parameters to be sent. + public UnityRLInitParameters Initialize(CommunicatorInitParameters initParameters) + { + var academyParameters = new UnityRLInitializationOutputProto + { + Name = initParameters.name, + Version = initParameters.version + }; + + academyParameters.EnvironmentParameters = new EnvironmentParametersProto(); + + var resetParameters = initParameters.environmentResetParameters.resetParameters; + foreach (var key in resetParameters.Keys) + { + academyParameters.EnvironmentParameters.FloatParameters.Add(key, resetParameters[key]); + } + + UnityInputProto input; + UnityInputProto initializationInput; + try + { + initializationInput = Initialize( + new UnityOutputProto + { + RlInitializationOutput = academyParameters + }, + out input); + } + catch + { + var exceptionMessage = "The Communicator was unable to connect. Please make sure the External " + + "process is ready to accept communication with Unity."; + + // Check for common error condition and add details to the exception message. 
+ var httpProxy = Environment.GetEnvironmentVariable("HTTP_PROXY"); + var httpsProxy = Environment.GetEnvironmentVariable("HTTPS_PROXY"); + if (httpProxy != null || httpsProxy != null) + { + exceptionMessage += " Try removing HTTP_PROXY and HTTPS_PROXY from the" + + "environment variables and try again."; + } + throw new UnityAgentsException(exceptionMessage); + } + + UpdateEnvironmentWithInput(input.RlInput); + return initializationInput.RlInitializationInput.ToUnityRLInitParameters(); + } + + /// + /// Adds the brain to the list of brains which will be sending information to External. + /// + /// Brain key. + /// Brain parameters needed to send to the trainer. + public void SubscribeBrain(string brainKey, BrainParameters brainParameters) + { + if (m_CurrentAgents.ContainsKey(brainKey)) + { + return; + } + m_CurrentAgents[brainKey] = new List(k_NumAgents); + m_CurrentUnityRlOutput.AgentInfos.Add( + brainKey, + new UnityRLOutputProto.Types.ListAgentInfoProto() + ); + + CacheBrainParameters(brainKey, brainParameters); + } + + void UpdateEnvironmentWithInput(UnityRLInputProto rlInput) + { + SendRLInputReceivedEvent(rlInput.IsTraining); + SendCommandEvent(rlInput.Command, rlInput.EnvironmentParameters); + } + + private UnityInputProto Initialize(UnityOutputProto unityOutput, + out UnityInputProto unityInput) + { +# if UNITY_EDITOR || UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX || UNITY_STANDALONE_LINUX + m_IsOpen = true; + var channel = new Channel( + "localhost:" + m_CommunicatorInitParameters.port, + ChannelCredentials.Insecure); + + m_Client = new UnityToExternalProto.UnityToExternalProtoClient(channel); + var result = m_Client.Exchange(WrapMessage(unityOutput, 200)); + unityInput = m_Client.Exchange(WrapMessage(null, 200)).UnityInput; +#if UNITY_EDITOR +#if UNITY_2017_2_OR_NEWER + EditorApplication.playModeStateChanged += HandleOnPlayModeChanged; +#else + EditorApplication.playmodeStateChanged += HandleOnPlayModeChanged; +#endif +#endif + return 
result.UnityInput; +#else + throw new UnityAgentsException( + "You cannot perform training on this platform."); +#endif + } + + #endregion + + #region Destruction + + /// + /// Close the communicator gracefully on both sides of the communication. + /// + public void Dispose() + { +# if UNITY_EDITOR || UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX || UNITY_STANDALONE_LINUX + if (!m_IsOpen) + { + return; + } + + try + { + m_Client.Exchange(WrapMessage(null, 400)); + m_IsOpen = false; + } + catch + { + // ignored + } +#else + throw new UnityAgentsException( + "You cannot perform training on this platform."); +#endif + } + + #endregion + + #region Sending Events + private void SendCommandEvent(CommandProto command, EnvironmentParametersProto environmentParametersProto) + { + switch (command) + { + case CommandProto.Quit: + { + QuitCommandReceived?.Invoke(); + return; + } + case CommandProto.Reset: + { + ResetCommandReceived?.Invoke(environmentParametersProto.ToEnvironmentResetParameters()); + return; + } + default: + { + return; + } + } + } + + private void SendRLInputReceivedEvent(bool isTraining) + { + RLInputReceived?.Invoke(new UnityRLInputParameters { isTraining = isTraining }); + } + + #endregion + + #region Sending and retreiving data + + public void DecideBatch() + { + if (m_CurrentAgents.Values.All(l => l.Count == 0)) + { + return; + } + foreach (var brainKey in m_CurrentAgents.Keys) + { + using (TimerStack.Instance.Scoped("AgentInfo.ToProto")) + { + if (m_CurrentAgents[brainKey].Count > 0) + { + foreach (var agent in m_CurrentAgents[brainKey]) + { + // Update the sensor data on the AgentInfo + agent.GenerateSensorData(); + var agentInfoProto = agent.Info.ToProto(); + m_CurrentUnityRlOutput.AgentInfos[brainKey].Value.Add(agentInfoProto); + } + + } + } + } + SendBatchedMessageHelper(); + foreach (var brainKey in m_CurrentAgents.Keys) + { + m_CurrentAgents[brainKey].Clear(); + } + } + + /// + /// Sends the observations of one Agent. + /// + /// Batch Key. 
+ /// Agent info. + public void PutObservations(string brainKey, Agent agent) + { + m_CurrentAgents[brainKey].Add(agent); + } + + /// + /// Helper method that sends the current UnityRLOutput, receives the next UnityInput and + /// Applies the appropriate AgentAction to the agents. + /// + void SendBatchedMessageHelper() + { + var message = new UnityOutputProto + { + RlOutput = m_CurrentUnityRlOutput, + }; + var tempUnityRlInitializationOutput = GetTempUnityRlInitializationOutput(); + if (tempUnityRlInitializationOutput != null) + { + message.RlInitializationOutput = tempUnityRlInitializationOutput; + } + + var input = Exchange(message); + UpdateSentBrainParameters(tempUnityRlInitializationOutput); + + foreach (var k in m_CurrentUnityRlOutput.AgentInfos.Keys) + { + m_CurrentUnityRlOutput.AgentInfos[k].Value.Clear(); + } + + var rlInput = input?.RlInput; + + if (rlInput?.AgentActions == null) + { + return; + } + + UpdateEnvironmentWithInput(rlInput); + + m_LastActionsReceived.Clear(); + foreach (var brainName in rlInput.AgentActions.Keys) + { + if (!m_CurrentAgents[brainName].Any()) + { + continue; + } + + if (!rlInput.AgentActions[brainName].Value.Any()) + { + continue; + } + + var agentActions = rlInput.AgentActions[brainName].ToAgentActionList(); + var numAgents = m_CurrentAgents[brainName].Count; + var agentActionDict = new Dictionary(numAgents); + m_LastActionsReceived[brainName] = agentActionDict; + for (var i = 0; i < numAgents; i++) + { + var agent = m_CurrentAgents[brainName][i]; + var agentAction = agentActions[i]; + agentActionDict[agent] = agentAction; + agent.UpdateAgentAction(agentAction); + } + } + } + + public Dictionary GetActions(string key) + { + return m_LastActionsReceived[key]; + } + + /// + /// Send a UnityOutput and receives a UnityInput. + /// + /// The next UnityInput. + /// The UnityOutput to be sent. 
+ private UnityInputProto Exchange(UnityOutputProto unityOutput) + { +# if UNITY_EDITOR || UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX || UNITY_STANDALONE_LINUX + if (!m_IsOpen) + { + return null; + } + try + { + var message = m_Client.Exchange(WrapMessage(unityOutput, 200)); + if (message.Header.Status == 200) + { + return message.UnityInput; + } + + m_IsOpen = false; + // Not sure if the quit command is actually sent when a + // non 200 message is received. Notify that we are indeed + // quitting. + QuitCommandReceived?.Invoke(); + return message.UnityInput; + } + catch + { + m_IsOpen = false; + QuitCommandReceived?.Invoke(); + return null; + } +#else + throw new UnityAgentsException( + "You cannot perform training on this platform."); +#endif + } + + /// + /// Wraps the UnityOuptut into a message with the appropriate status. + /// + /// The UnityMessage corresponding. + /// The UnityOutput to be wrapped. + /// The status of the message. + private static UnityMessageProto WrapMessage(UnityOutputProto content, int status) + { + return new UnityMessageProto + { + Header = new HeaderProto { Status = status }, + UnityOutput = content + }; + } + + private void CacheBrainParameters(string brainKey, BrainParameters brainParameters) + { + if (m_sentBrainKeys.Contains(brainKey)) + { + return; + } + + // TODO We should check that if m_unsentBrainKeys has brainKey, it equals brainParameters + m_unsentBrainKeys[brainKey] = brainParameters; + } + + private UnityRLInitializationOutputProto GetTempUnityRlInitializationOutput() + { + UnityRLInitializationOutputProto output = null; + foreach (var brainKey in m_unsentBrainKeys.Keys) + { + if (m_CurrentUnityRlOutput.AgentInfos.ContainsKey(brainKey)) + { + if (output == null) + { + output = new UnityRLInitializationOutputProto(); + } + + var brainParameters = m_unsentBrainKeys[brainKey]; + output.BrainParameters.Add(brainParameters.ToProto(brainKey, true)); + } + } + + return output; + } + + private void 
UpdateSentBrainParameters(UnityRLInitializationOutputProto output) + { + if (output == null) + { + return; + } + + foreach (var brainProto in output.BrainParameters) + { + m_sentBrainKeys.Add(brainProto.BrainName); + m_unsentBrainKeys.Remove(brainProto.BrainName); + } + } + + #endregion + +#if UNITY_EDITOR +#if UNITY_2017_2_OR_NEWER + /// + /// When the editor exits, the communicator must be closed + /// + /// State. + private void HandleOnPlayModeChanged(PlayModeStateChange state) + { + // This method is run whenever the playmode state is changed. + if (state == PlayModeStateChange.ExitingPlayMode) + { + Dispose(); + } + } + +#else + /// + /// When the editor exits, the communicator must be closed + /// + private void HandleOnPlayModeChanged() + { + // This method is run whenever the playmode state is changed. + if (!EditorApplication.isPlayingOrWillChangePlaymode) + { + Close(); + } + } + +#endif +#endif + } +} diff --git a/UnitySDK/Assets/ML-Agents/Scripts/RpcCommunicator.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/Grpc/RpcCommunicator.cs.meta similarity index 100% rename from UnitySDK/Assets/ML-Agents/Scripts/RpcCommunicator.cs.meta rename to UnitySDK/Assets/ML-Agents/Scripts/Grpc/RpcCommunicator.cs.meta diff --git a/UnitySDK/Assets/ML-Agents/Scripts/HeuristicBrain.cs b/UnitySDK/Assets/ML-Agents/Scripts/HeuristicBrain.cs deleted file mode 100644 index f4a77bb847..0000000000 --- a/UnitySDK/Assets/ML-Agents/Scripts/HeuristicBrain.cs +++ /dev/null @@ -1,83 +0,0 @@ -using UnityEngine; -using UnityEngine.Serialization; -#if UNITY_EDITOR -using UnityEditor; -#endif - -namespace MLAgents -{ - /// - /// The Heuristic Brain type allows you to hand code an Agent's decision making process. - /// A Heuristic Brain requires an implementation of the Decision interface to which it - /// delegates the decision making process. - /// When yusing a Heuristic Brain, you must give it a Monoscript of a Decision implementation. 
- /// - [CreateAssetMenu(fileName = "NewHeuristicBrain", menuName = "ML-Agents/Heuristic Brain")] - public class HeuristicBrain : Brain - { - [SerializeField] - [HideInInspector] - public Decision decision; -#if UNITY_EDITOR - [HideInInspector] - public MonoScript decisionScript; -#endif - [FormerlySerializedAs("c_decision")] - [SerializeField] - [HideInInspector] - public string cDecision; - - public void OnValidate() - { -#if UNITY_EDITOR - if (decisionScript != null) - { - cDecision = decisionScript.GetClass().Name; - } - else - { - cDecision = ""; - } -#endif - } - - /// - protected override void Initialize() - { - if ((cDecision != null) && decision == null) - { - decision = CreateInstance(cDecision) as Decision; - decision.brainParameters = brainParameters; - } - } - - ///Uses the Decision Component to decide that action to take - protected override void DecideAction() - { - if (decision == null) - { - throw new UnityAgentsException( - "The Brain is set to Heuristic, but no decision script attached to it"); - } - foreach (var agent in m_AgentInfos.Keys) - { - agent.UpdateVectorAction(decision.Decide( - m_AgentInfos[agent].stackedVectorObservation, - m_AgentInfos[agent].visualObservations, - m_AgentInfos[agent].reward, - m_AgentInfos[agent].done, - m_AgentInfos[agent].memories)); - } - foreach (var agent in m_AgentInfos.Keys) - { - agent.UpdateMemoriesAction(decision.MakeMemory( - m_AgentInfos[agent].stackedVectorObservation, - m_AgentInfos[agent].visualObservations, - m_AgentInfos[agent].reward, - m_AgentInfos[agent].done, - m_AgentInfos[agent].memories)); - } - m_AgentInfos.Clear(); - } - } -} diff --git a/UnitySDK/Assets/ML-Agents/Scripts/HeuristicBrain.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/HeuristicBrain.cs.meta deleted file mode 100755 index e03d712949..0000000000 --- a/UnitySDK/Assets/ML-Agents/Scripts/HeuristicBrain.cs.meta +++ /dev/null @@ -1,12 +0,0 @@ -fileFormatVersion: 2 -guid: 943466ab374444748a364f9d6c3e2fe2 -timeCreated: 1504070366 
-licenseType: Free -MonoImporter: - serializedVersion: 2 - defaultReferences: [] - executionOrder: 0 - icon: {instanceID: 0} - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Scripts/ICommunicator.cs b/UnitySDK/Assets/ML-Agents/Scripts/ICommunicator.cs index b6dac08095..936a29394b 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/ICommunicator.cs +++ b/UnitySDK/Assets/ML-Agents/Scripts/ICommunicator.cs @@ -1,12 +1,76 @@ +using System; +using System.Collections.Generic; using UnityEngine; using MLAgents.CommunicatorObjects; namespace MLAgents { - public struct CommunicatorParameters + public struct EnvironmentResetParameters { + /// + /// Mapping of string : float which defines which parameters can be + /// reset from python. + /// + public ResetParameters resetParameters; + + /// + /// The protobuf for custom reset parameters. + /// NOTE: This is the last remaining relic of gRPC protocol + /// that is left in our code. We need to decide how to handle this + /// moving forward. + /// + public CustomResetParametersProto customResetParameters; + } + public struct CommunicatorInitParameters + { + /// + /// Port to listen for connections on. + /// public int port; + /// + /// The name of the environment. + /// + public string name; + /// + /// The version of the Unity SDK. + /// + public string version; + /// + /// The set of environment parameters defined by the user that will be sent to the communicator. + /// + public EnvironmentResetParameters environmentResetParameters; } + public struct UnityRLInitParameters + { + /// + /// An RNG seed sent from the python process to Unity. + /// + public int seed; + } + public struct UnityRLInputParameters + { + /// + /// Boolean sent back from python to indicate whether or not training is happening. + /// + public bool isTraining; + } + + /// + /// Delegate for handling quite events sent back from the communicator. 
+ /// + public delegate void QuitCommandHandler(); + + /// + /// Delegate for handling reset parameter updates sent from the communicator. + /// + /// + public delegate void ResetCommandHandler(EnvironmentResetParameters resetParams); + + /// + /// Delegate to handle UnityRLInputParameters updates from the communicator. + /// + /// + public delegate void RLInputReceivedHandler(UnityRLInputParameters inputParams); /** This is the interface of the Communicators. @@ -38,34 +102,56 @@ Since the messages are sent back and forth with exchange and simultaneously when ......UnityRLOutput ......UnityRLInitializationOutput ...UnityInput - ......UnityRLIntput - ......UnityRLInitializationIntput + ......UnityRLInput + ......UnityRLInitializationInput UnityOutput and UnityInput can be extended to provide functionalities beyond RL UnityRLOutput and UnityRLInput can be extended to provide new RL functionalities */ - public interface ICommunicator + public interface ICommunicator : IBatchedDecisionMaker { /// - /// Initialize the communicator by sending the first UnityOutput and receiving the - /// first UnityInput. The second UnityInput is stored in the unityInput argument. + /// Quit was received by the communicator. + /// + event QuitCommandHandler QuitCommandReceived; + + + /// + /// Reset command sent back from the communicator. /// - /// The first Unity Input. - /// The first Unity Output. - /// The second Unity input. - UnityInput Initialize(UnityOutput unityOutput, - out UnityInput unityInput); + event ResetCommandHandler ResetCommandReceived; /// - /// Send a UnityOutput and receives a UnityInput. + /// Unity RL Input was received by the communicator. /// - /// The next UnityInput. - /// The UnityOutput to be sent. - UnityInput Exchange(UnityOutput unityOutput); + event RLInputReceivedHandler RLInputReceived; /// - /// Close the communicator gracefully on both sides of the communication. + /// Sends the academy parameters through the Communicator. 
+ /// Is used by the academy to send the AcademyParameters to the communicator. /// - void Close(); + /// The External Initialization Parameters received. + /// The Unity Initialization Parameters to be sent. + UnityRLInitParameters Initialize(CommunicatorInitParameters initParameters); + + /// + /// Registers a new Brain to the Communicator. + /// + /// The name or key uniquely identifying the Brain + /// The Parameters for the Brain being registered + void SubscribeBrain(string name, BrainParameters brainParameters); + + /// + /// Gets the AgentActions based on the batching key. + /// + /// A key to identify which actions to get + /// + Dictionary GetActions(string key); + } + + public interface IBatchedDecisionMaker : IDisposable + { + void PutObservations(string key, Agent agent); + void DecideBatch(); } } diff --git a/UnitySDK/Assets/ML-Agents/Scripts/InferenceBrain/ApplierImpl.cs b/UnitySDK/Assets/ML-Agents/Scripts/InferenceBrain/ApplierImpl.cs index 88459cf6ef..ecdb434e20 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/InferenceBrain/ApplierImpl.cs +++ b/UnitySDK/Assets/ML-Agents/Scripts/InferenceBrain/ApplierImpl.cs @@ -13,11 +13,11 @@ namespace MLAgents.InferenceBrain /// public class ContinuousActionOutputApplier : TensorApplier.IApplier { - public void Apply(TensorProxy tensorProxy, Dictionary agentInfo) + public void Apply(TensorProxy tensorProxy, IEnumerable agents) { var actionSize = tensorProxy.shape[tensorProxy.shape.Length - 1]; var agentIndex = 0; - foreach (var agent in agentInfo.Keys) + foreach (var agent in agents) { var action = new float[actionSize]; for (var j = 0; j < actionSize; j++) @@ -47,10 +47,11 @@ public DiscreteActionOutputApplier(int[] actionSize, int seed, ITensorAllocator m_Allocator = allocator; } - public void Apply(TensorProxy tensorProxy, Dictionary agentInfo) + public void Apply(TensorProxy tensorProxy, IEnumerable agents) { //var tensorDataProbabilities = tensorProxy.Data as float[,]; - var batchSize = agentInfo.Keys.Count; 
+ var agentsArray = agents as List ?? agents.ToList(); + var batchSize = agentsArray.Count; var actions = new float[batchSize, m_ActionSize.Length]; var startActionIndices = Utilities.CumSum(m_ActionSize); for (var actionIndex = 0; actionIndex < m_ActionSize.Length; actionIndex++) @@ -91,7 +92,7 @@ public void Apply(TensorProxy tensorProxy, Dictionary agentInf outputTensor.data.Dispose(); } var agentIndex = 0; - foreach (var agent in agentInfo.Keys) + foreach (var agent in agentsArray) { var action = new float[m_ActionSize.Length]; for (var j = 0; j < m_ActionSize.Length; j++) @@ -179,12 +180,12 @@ public BarracudaMemoryOutputApplier(int memoriesCount, int memoryIndex) m_MemoryIndex = memoryIndex; } - public void Apply(TensorProxy tensorProxy, Dictionary agentInfo) + public void Apply(TensorProxy tensorProxy, IEnumerable agents) { var agentIndex = 0; var memorySize = (int)tensorProxy.shape[tensorProxy.shape.Length - 1]; - foreach (var agent in agentInfo.Keys) + foreach (var agent in agents) { var memory = agent.GetMemoriesAction(); @@ -212,11 +213,11 @@ public void Apply(TensorProxy tensorProxy, Dictionary agentInf ///
public class MemoryOutputApplier : TensorApplier.IApplier { - public void Apply(TensorProxy tensorProxy, Dictionary agentInfo) + public void Apply(TensorProxy tensorProxy, IEnumerable agents) { var agentIndex = 0; var memorySize = tensorProxy.shape[tensorProxy.shape.Length - 1]; - foreach (var agent in agentInfo.Keys) + foreach (var agent in agents) { var memory = new List(); for (var j = 0; j < memorySize; j++) @@ -236,10 +237,10 @@ public void Apply(TensorProxy tensorProxy, Dictionary agentInf ///
public class ValueEstimateApplier : TensorApplier.IApplier { - public void Apply(TensorProxy tensorProxy, Dictionary agentInfo) + public void Apply(TensorProxy tensorProxy, IEnumerable agents) { var agentIndex = 0; - foreach (var agent in agentInfo.Keys) + foreach (var agent in agents) { agent.UpdateValueAction(tensorProxy.data[agentIndex, 0]); agentIndex++; diff --git a/UnitySDK/Assets/ML-Agents/Scripts/InferenceBrain/BarracudaModelParamLoader.cs b/UnitySDK/Assets/ML-Agents/Scripts/InferenceBrain/BarracudaModelParamLoader.cs index 7a7363ea8b..ccd8a698df 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/InferenceBrain/BarracudaModelParamLoader.cs +++ b/UnitySDK/Assets/ML-Agents/Scripts/InferenceBrain/BarracudaModelParamLoader.cs @@ -18,53 +18,22 @@ private enum ModelActionType Continuous } private const long k_ApiVersion = 2; - private readonly IWorker m_Engine; - private readonly Model m_Model; - private readonly BrainParameters m_BrainParameters; - private readonly List m_FailedModelChecks = new List(); /// - /// Factory for the ModelParamLoader : Creates a ModelParamLoader and runs the checks - /// on it. + /// Generates the Tensor inputs that are expected to be present in the Model. 
/// - /// - /// The Barracuda engine worker we get the parameters and the checks from - /// /// /// The Barracuda engine model for loading static parameters /// - /// - /// The BrainParameters that are used verify the compatibility with the InferenceEngine - /// - /// - public static BarracudaModelParamLoader GetLoaderAndCheck( - IWorker engine, Model model, BrainParameters brainParameters) - { - var modelParamLoader = new BarracudaModelParamLoader(engine, model, brainParameters); - modelParamLoader.GenerateChecks(); - return modelParamLoader; - } - - private BarracudaModelParamLoader( - IWorker engine, Model model, BrainParameters brainParameters) - { - m_Engine = engine; - m_Model = model; - m_BrainParameters = brainParameters; - } - - /// - /// Generates the Tensor inputs that are expected to be present in the Model. - /// /// TensorProxy IEnumerable with the expected Tensor inputs - public IReadOnlyList GetInputTensors() + public static IReadOnlyList GetInputTensors(Model model) { var tensors = new List(); - if (m_Model == null) + if (model == null) return tensors; - foreach (var input in m_Model.inputs) + foreach (var input in model.inputs) { tensors.Add(new TensorProxy { @@ -75,9 +44,8 @@ public IReadOnlyList GetInputTensors() }); } - foreach (var mem in m_Model.memories) + foreach (var mem in model.memories) { - //Debug.Log($"{mem.input}: {mem.shape} -> {BarracudaUtils.TensorShapeFromBarracuda(mem.shape).Length}"); tensors.Add(new TensorProxy { name = mem.input, @@ -95,22 +63,25 @@ public IReadOnlyList GetInputTensors() /// /// Generates the Tensor outputs that are expected to be present in the Model. 
/// + /// + /// The Barracuda engine model for loading static parameters + /// /// TensorProxy IEnumerable with the expected Tensor outputs - public string[] GetOutputNames() + public static string[] GetOutputNames(Model model) { var names = new List(); - if (m_Model == null) + if (model == null) { return names.ToArray(); } names.Add(TensorNames.ActionOutput); - var memory = GetIntScalar(TensorNames.MemorySize); + var memory = (int)model.GetTensorByName(TensorNames.MemorySize)[0]; if (memory > 0) { - foreach (var mem in m_Model.memories) + foreach (var mem in model.memories) { names.Add(mem.output); } @@ -122,71 +93,68 @@ public string[] GetOutputNames() } /// - /// Queries the InferenceEngine for the value of a variable in the graph given its name. - /// Only works with int32 Tensors with zero dimensions containing a unique element. - /// If the node was not found or could not be retrieved, the value -1 will be returned. - /// - /// The name of the Tensor variable - /// The value of the scalar variable in the model. (-1 if not found) - private int GetIntScalar(string name) - { - return (int)m_Model.GetTensorByName(name)[0]; - } - - /// - /// Retrieves an IEnumerable of string corresponding to the failed compatibility checks - /// between the InferenceEngine and the BrainParameters. - /// - public IEnumerable GetChecks() - { - return m_FailedModelChecks; - } - - /// - /// Generates the list of failed checks that failed when comparing the data from the Model - /// and from the BrainParameters + /// Factory for the ModelParamLoader : Creates a ModelParamLoader and runs the checks + /// on it. 
/// - private void GenerateChecks() + /// + /// The Barracuda engine model for loading static parameters + /// + /// + /// The BrainParameters that are used verify the compatibility with the InferenceEngine + /// + /// The list the error messages of the checks that failed + public static IEnumerable CheckModel(Model model, BrainParameters brainParameters) { - m_FailedModelChecks.Clear(); - if (m_Engine == null) + List failedModelChecks = new List(); + if (model == null) { - m_FailedModelChecks.Add( + failedModelChecks.Add( "There is no model for this Brain, cannot run inference. " + "(But can still train)"); - return; + return failedModelChecks; } - var modelApiVersion = GetIntScalar(TensorNames.VersionNumber); - var memorySize = GetIntScalar(TensorNames.MemorySize); - var isContinuousInt = GetIntScalar(TensorNames.IsContinuousControl); + var modelApiVersion = (int)model.GetTensorByName(TensorNames.VersionNumber)[0]; + var memorySize = (int)model.GetTensorByName(TensorNames.MemorySize)[0]; + var isContinuousInt = (int)model.GetTensorByName(TensorNames.IsContinuousControl)[0]; var isContinuous = GetActionType(isContinuousInt); - var actionSize = GetIntScalar(TensorNames.ActionOutputShape); + var actionSize = (int)model.GetTensorByName(TensorNames.ActionOutputShape)[0]; if (modelApiVersion == -1) { - m_FailedModelChecks.Add( + failedModelChecks.Add( "Model was not trained using the right version of ML-Agents. 
" + "Cannot use this model."); - return; + return failedModelChecks; } if (modelApiVersion != k_ApiVersion) { - m_FailedModelChecks.Add( + failedModelChecks.Add( $"Version of the trainer the model was trained with ({modelApiVersion}) " + $"is not compatible with the Brain's version ({k_ApiVersion})."); - return; + return failedModelChecks; } - CheckIntScalarPresenceHelper(new Dictionary() - { - {TensorNames.MemorySize, memorySize}, - {TensorNames.IsContinuousControl, isContinuousInt}, - {TensorNames.ActionOutputShape, actionSize} - }); - CheckInputTensorPresence(memorySize, isContinuous); - CheckOutputTensorPresence(memorySize); - CheckInputTensorShape(); - CheckOutputTensorShape(isContinuous, actionSize); + failedModelChecks.AddRange( + CheckIntScalarPresenceHelper(new Dictionary() + { + {TensorNames.MemorySize, memorySize}, + {TensorNames.IsContinuousControl, isContinuousInt}, + {TensorNames.ActionOutputShape, actionSize} + }) + ); + failedModelChecks.AddRange( + CheckInputTensorPresence(model, brainParameters, memorySize, isContinuous) + ); + failedModelChecks.AddRange( + CheckOutputTensorPresence(model, memorySize)) + ; + failedModelChecks.AddRange( + CheckInputTensorShape(model, brainParameters) + ); + failedModelChecks.AddRange( + CheckOutputTensorShape(model, brainParameters, isContinuous, actionSize) + ); + return failedModelChecks; } /// @@ -220,21 +188,31 @@ private static ModelActionType GetActionType(int isContinuousInt) /// invalid value of -1. 
/// /// Mapping from node names to int values - private void CheckIntScalarPresenceHelper(Dictionary requiredScalarFields) + /// The list the error messages of the checks that failed + private static IEnumerable CheckIntScalarPresenceHelper( + Dictionary requiredScalarFields) { + var failedModelChecks = new List(); foreach (var field in requiredScalarFields) { if (field.Value == -1) { - m_FailedModelChecks.Add($"Missing node in the model provided : {field.Key}"); + failedModelChecks.Add($"Missing node in the model provided : {field.Key}"); } } + return failedModelChecks; } /// /// Generates failed checks that correspond to inputs expected by the model that are not /// present in the BrainParameters. /// + /// + /// The Barracuda engine model for loading static parameters + /// + /// + /// The BrainParameters that are used verify the compatibility with the InferenceEngine + /// /// /// The memory size that the model is expecting. /// @@ -244,32 +222,25 @@ private void CheckIntScalarPresenceHelper(Dictionary requiredScalar /// /// A IEnumerable of string corresponding to the failed input presence checks. /// - private void CheckInputTensorPresence(int memory, ModelActionType isContinuous) + private static IEnumerable CheckInputTensorPresence( + Model model, + BrainParameters brainParameters, + int memory, + ModelActionType isContinuous) { - var tensorsNames = GetInputTensors().Select(x => x.name).ToList(); + var failedModelChecks = new List(); + var tensorsNames = GetInputTensors(model).Select(x => x.name).ToList(); // If there is no Vector Observation Input but the Brain Parameters expect one. - if ((m_BrainParameters.vectorObservationSize != 0) && + if ((brainParameters.vectorObservationSize != 0) && (!tensorsNames.Contains(TensorNames.VectorObservationPlacholder))) { - m_FailedModelChecks.Add( + failedModelChecks.Add( "The model does not contain a Vector Observation Placeholder Input. 
" + "You must set the Vector Observation Space Size to 0."); } - // If there are not enough Visual Observation Input compared to what the - // Brain Parameters expect. - for (var visObsIndex = 0; - visObsIndex < m_BrainParameters.cameraResolutions.Length; - visObsIndex++) - { - if (!tensorsNames.Contains( - TensorNames.VisualObservationPlaceholderPrefix + visObsIndex)) - { - m_FailedModelChecks.Add( - "The model does not contain a Visual Observation Placeholder Input " + - "for visual observation " + visObsIndex + "."); - } - } + + // TODO reenable checks there are enough Visual Observation Placeholder in the model. // If the model has a non-negative memory size but requires a recurrent input if (memory > 0) @@ -277,7 +248,7 @@ private void CheckInputTensorPresence(int memory, ModelActionType isContinuous) if (!tensorsNames.Any(x => x.EndsWith("_h")) || !tensorsNames.Any(x => x.EndsWith("_c"))) { - m_FailedModelChecks.Add( + failedModelChecks.Add( "The model does not contain a Recurrent Input Node but has memory_size."); } } @@ -287,103 +258,122 @@ private void CheckInputTensorPresence(int memory, ModelActionType isContinuous) { if (!tensorsNames.Contains(TensorNames.ActionMaskPlaceholder)) { - m_FailedModelChecks.Add( + failedModelChecks.Add( "The model does not contain an Action Mask but is using Discrete Control."); } } + return failedModelChecks; } /// /// Generates failed checks that correspond to outputs expected by the model that are not /// present in the BrainParameters. /// + /// + /// The Barracuda engine model for loading static parameters + /// /// The memory size that the model is expecting/ /// /// A IEnumerable of string corresponding to the failed output presence checks. /// - private void CheckOutputTensorPresence(int memory) + private static IEnumerable CheckOutputTensorPresence(Model model, int memory) { + var failedModelChecks = new List(); // If there is no Action Output. 
- if (!m_Model.outputs.Contains(TensorNames.ActionOutput)) + if (!model.outputs.Contains(TensorNames.ActionOutput)) { - m_FailedModelChecks.Add("The model does not contain an Action Output Node."); + failedModelChecks.Add("The model does not contain an Action Output Node."); } // If there is no Recurrent Output but the model is Recurrent. if (memory > 0) { - var memOutputs = m_Model.memories.Select(x => x.output).ToList(); + var memOutputs = model.memories.Select(x => x.output).ToList(); if (!memOutputs.Any(x => x.EndsWith("_h")) || !memOutputs.Any(x => x.EndsWith("_c"))) { - m_FailedModelChecks.Add( + failedModelChecks.Add( "The model does not contain a Recurrent Output Node but has memory_size."); } } + return failedModelChecks; } /// /// Generates failed checks that correspond to inputs shapes incompatibilities between /// the model and the BrainParameters. /// - private void CheckInputTensorShape() + /// + /// The Barracuda engine model for loading static parameters + /// + /// + /// The BrainParameters that are used verify the compatibility with the InferenceEngine + /// + /// The list the error messages of the checks that failed + private static IEnumerable CheckInputTensorShape( + Model model, BrainParameters brainParameters) { + var failedModelChecks = new List(); var tensorTester = - new Dictionary>() + new Dictionary>() { {TensorNames.VectorObservationPlacholder, CheckVectorObsShape}, {TensorNames.PreviousActionPlaceholder, CheckPreviousActionShape}, - {TensorNames.RandomNormalEpsilonPlaceholder, ((tensor) => null)}, - {TensorNames.ActionMaskPlaceholder, ((tensor) => null)}, - {TensorNames.SequenceLengthPlaceholder, ((tensor) => null)}, - {TensorNames.RecurrentInPlaceholder, ((tensor) => null)}, + {TensorNames.RandomNormalEpsilonPlaceholder, ((bp, tensor) => null)}, + {TensorNames.ActionMaskPlaceholder, ((bp, tensor) => null)}, + {TensorNames.SequenceLengthPlaceholder, ((bp, tensor) => null)}, + {TensorNames.RecurrentInPlaceholder, ((bp, tensor) => 
null)}, }; - foreach (var mem in m_Model.memories) + foreach (var mem in model.memories) { - tensorTester[mem.input] = ((tensor) => null); + tensorTester[mem.input] = ((bp, tensor) => null); } - for (var obsIndex = 0; obsIndex < m_BrainParameters.cameraResolutions.Length; obsIndex++) - { - var index = obsIndex; - tensorTester[TensorNames.VisualObservationPlaceholderPrefix + obsIndex] = - (tensor) => CheckVisualObsShape(tensor, index); - } + // TODO reenable checks on visual observation shapes. + // If the model expects an input but it is not in this list - foreach (var tensor in GetInputTensors()) + foreach (var tensor in GetInputTensors(model)) { if (!tensorTester.ContainsKey(tensor.name)) { - m_FailedModelChecks.Add( - "Model requires an unknown input named : " + tensor.name); + if (!tensor.name.Contains("visual_observation")) + { + failedModelChecks.Add( + "Model requires an unknown input named : " + tensor.name); + } } else { var tester = tensorTester[tensor.name]; - var error = tester.Invoke(tensor); + var error = tester.Invoke(brainParameters, tensor); if (error != null) { - m_FailedModelChecks.Add(error); + failedModelChecks.Add(error); } } } + return failedModelChecks; } /// /// Checks that the shape of the Vector Observation input placeholder is the same in the /// model and in the Brain Parameters. /// + /// + /// The BrainParameters that are used verify the compatibility with the InferenceEngine + /// /// The tensor that is expected by the model /// /// If the Check failed, returns a string containing information about why the /// check failed. If the check passed, returns null. 
/// - private string CheckVectorObsShape(TensorProxy tensorProxy) + private static string CheckVectorObsShape( + BrainParameters brainParameters, TensorProxy tensorProxy) { - var vecObsSizeBp = m_BrainParameters.vectorObservationSize; - var numStackedVector = m_BrainParameters.numStackedVectorObservations; + var vecObsSizeBp = brainParameters.vectorObservationSize; + var numStackedVector = brainParameters.numStackedVectorObservations; var totalVecObsSizeT = tensorProxy.shape[tensorProxy.shape.Length - 1]; if (vecObsSizeBp * numStackedVector != totalVecObsSizeT) { @@ -397,12 +387,16 @@ private string CheckVectorObsShape(TensorProxy tensorProxy) /// Checks that the shape of the Previous Vector Action input placeholder is the same in the /// model and in the Brain Parameters. ///
+ /// + /// The BrainParameters that are used verify the compatibility with the InferenceEngine + /// /// The tensor that is expected by the model /// If the Check failed, returns a string containing information about why the /// check failed. If the check passed, returns null. - private string CheckPreviousActionShape(TensorProxy tensorProxy) + private static string CheckPreviousActionShape( + BrainParameters brainParameters, TensorProxy tensorProxy) { - var numberActionsBp = m_BrainParameters.vectorActionSize.Length; + var numberActionsBp = brainParameters.vectorActionSize.Length; var numberActionsT = tensorProxy.shape[tensorProxy.shape.Length - 1]; if (numberActionsBp != numberActionsT) { @@ -412,38 +406,16 @@ private string CheckPreviousActionShape(TensorProxy tensorProxy) return null; } - /// - /// Checks that the shape of the visual observation input placeholder is the same in the - /// model and in the Brain Parameters. - /// - /// The tensor that is expected by the model - /// The index of the visual observation. - /// - /// If the Check failed, returns a string containing information about why the - /// check failed. If the check passed, returns null. - /// - private string CheckVisualObsShape(TensorProxy tensorProxy, int visObsIndex) - { - var resolutionBp = m_BrainParameters.cameraResolutions[visObsIndex]; - var widthBp = resolutionBp.width; - var heightBp = resolutionBp.height; - var pixelBp = resolutionBp.blackAndWhite ? 1 : 3; - var heightT = tensorProxy.shape[1]; - var widthT = tensorProxy.shape[2]; - var pixelT = tensorProxy.shape[3]; - if ((widthBp != widthT) || (heightBp != heightT) || (pixelBp != pixelT)) - { - return $"The visual Observation {visObsIndex} of the model does not match. 
" + - $"Received TensorProxy of shape [?x{widthBp}x{heightBp}x{pixelBp}] but " + - $"was expecting [?x{widthT}x{heightT}x{pixelT}]."; - } - return null; - } - /// /// Generates failed checks that correspond to output shapes incompatibilities between /// the model and the BrainParameters. /// + /// + /// The Barracuda engine model for loading static parameters + /// + /// + /// The BrainParameters that are used verify the compatibility with the InferenceEngine + /// /// /// Whether the model is expecting continuous or discrete control. /// @@ -454,31 +426,36 @@ private string CheckVisualObsShape(TensorProxy tensorProxy, int visObsIndex) /// A IEnumerable of string corresponding to the incompatible shapes between model /// and BrainParameters. /// - private void CheckOutputTensorShape(ModelActionType isContinuous, int modelActionSize) + private static IEnumerable CheckOutputTensorShape( + Model model, + BrainParameters brainParameters, + ModelActionType isContinuous, + int modelActionSize) { + var failedModelChecks = new List(); if (isContinuous == ModelActionType.Unknown) { - m_FailedModelChecks.Add("Cannot infer type of Control from the provided model."); - return; + failedModelChecks.Add("Cannot infer type of Control from the provided model."); + return failedModelChecks; } if (isContinuous == ModelActionType.Continuous && - m_BrainParameters.vectorActionSpaceType != SpaceType.Continuous) + brainParameters.vectorActionSpaceType != SpaceType.Continuous) { - m_FailedModelChecks.Add( + failedModelChecks.Add( "Model has been trained using Continuous Control but the Brain Parameters " + "suggest Discrete Control."); - return; + return failedModelChecks; } if (isContinuous == ModelActionType.Discrete && - m_BrainParameters.vectorActionSpaceType != SpaceType.Discrete) + brainParameters.vectorActionSpaceType != SpaceType.Discrete) { - m_FailedModelChecks.Add( + failedModelChecks.Add( "Model has been trained using Discrete Control but the Brain Parameters " + "suggest 
Continuous Control."); - return; + return failedModelChecks; } - var tensorTester = new Dictionary>(); - if (m_BrainParameters.vectorActionSpaceType == SpaceType.Continuous) + var tensorTester = new Dictionary>(); + if (brainParameters.vectorActionSpaceType == SpaceType.Continuous) { tensorTester[TensorNames.ActionOutput] = CheckContinuousActionOutputShape; } @@ -487,24 +464,28 @@ private void CheckOutputTensorShape(ModelActionType isContinuous, int modelActio tensorTester[TensorNames.ActionOutput] = CheckDiscreteActionOutputShape; } // If the model expects an output but it is not in this list - foreach (var name in m_Model.outputs) + foreach (var name in model.outputs) { if (tensorTester.ContainsKey(name)) { var tester = tensorTester[name]; - var error = tester.Invoke(m_Model.GetShapeByName(name), modelActionSize); + var error = tester.Invoke(brainParameters, model.GetShapeByName(name), modelActionSize); if (error != null) { - m_FailedModelChecks.Add(error); + failedModelChecks.Add(error); } } } + return failedModelChecks; } /// /// Checks that the shape of the discrete action output is the same in the /// model and in the Brain Parameters. /// + /// + /// The BrainParameters that are used verify the compatibility with the InferenceEngine + /// /// The tensor shape that is expected by the model /// /// The size of the action output that is expected by the model. @@ -513,9 +494,10 @@ private void CheckOutputTensorShape(ModelActionType isContinuous, int modelActio /// If the Check failed, returns a string containing information about why the /// check failed. If the check passed, returns null. 
/// - private string CheckDiscreteActionOutputShape(TensorShape shape, int modelActionSize) + private static string CheckDiscreteActionOutputShape( + BrainParameters brainParameters, TensorShape shape, int modelActionSize) { - var bpActionSize = m_BrainParameters.vectorActionSize.Sum(); + var bpActionSize = brainParameters.vectorActionSize.Sum(); if (modelActionSize != bpActionSize) { return "Action Size of the model does not match. The BrainParameters expect " + @@ -528,15 +510,19 @@ private string CheckDiscreteActionOutputShape(TensorShape shape, int modelAction /// Checks that the shape of the continuous action output is the same in the /// model and in the Brain Parameters. /// + /// + /// The BrainParameters that are used verify the compatibility with the InferenceEngine + /// /// The tensor shape that is expected by the model /// /// The size of the action output that is expected by the model. /// /// If the Check failed, returns a string containing information about why the /// check failed. If the check passed, returns null. - private string CheckContinuousActionOutputShape(TensorShape shape, int modelActionSize) + private static string CheckContinuousActionOutputShape( + BrainParameters brainParameters, TensorShape shape, int modelActionSize) { - var bpActionSize = m_BrainParameters.vectorActionSize[0]; + var bpActionSize = brainParameters.vectorActionSize[0]; if (modelActionSize != bpActionSize) { return "Action Size of the model does not match. 
The BrainParameters expect " + diff --git a/UnitySDK/Assets/ML-Agents/Scripts/InferenceBrain/GeneratorImpl.cs b/UnitySDK/Assets/ML-Agents/Scripts/InferenceBrain/GeneratorImpl.cs index f88a245908..c35461356b 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/InferenceBrain/GeneratorImpl.cs +++ b/UnitySDK/Assets/ML-Agents/Scripts/InferenceBrain/GeneratorImpl.cs @@ -20,7 +20,7 @@ public BiDimensionalOutputGenerator(ITensorAllocator allocator) m_Allocator = allocator; } - public void Generate(TensorProxy tensorProxy, int batchSize, Dictionary agentInfo) + public void Generate(TensorProxy tensorProxy, int batchSize, IEnumerable agents) { TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator); } @@ -39,7 +39,7 @@ public BatchSizeGenerator(ITensorAllocator allocator) m_Allocator = allocator; } - public void Generate(TensorProxy tensorProxy, int batchSize, Dictionary agentInfo) + public void Generate(TensorProxy tensorProxy, int batchSize, IEnumerable agents) { tensorProxy.data?.Dispose(); tensorProxy.data = m_Allocator.Alloc(new TensorShape(1, 1)); @@ -62,7 +62,7 @@ public SequenceLengthGenerator(ITensorAllocator allocator) m_Allocator = allocator; } - public void Generate(TensorProxy tensorProxy, int batchSize, Dictionary agentInfo) + public void Generate(TensorProxy tensorProxy, int batchSize, IEnumerable agents) { tensorProxy.shape = new long[0]; tensorProxy.data?.Dispose(); @@ -86,15 +86,15 @@ public VectorObservationGenerator(ITensorAllocator allocator) } public void Generate( - TensorProxy tensorProxy, int batchSize, Dictionary agentInfo) + TensorProxy tensorProxy, int batchSize, IEnumerable agents) { TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator); var vecObsSizeT = tensorProxy.shape[tensorProxy.shape.Length - 1]; - var agentIndex = 0; - foreach (var agent in agentInfo.Keys) + foreach (var agent in agents) { - var vectorObs = agentInfo[agent].stackedVectorObservation; + var info = agent.Info; + var vectorObs = info.stackedVectorObservation; for 
(var j = 0; j < vecObsSizeT; j++) { tensorProxy.data[agentIndex, j] = vectorObs[j]; @@ -120,15 +120,16 @@ public RecurrentInputGenerator(ITensorAllocator allocator) } public void Generate( - TensorProxy tensorProxy, int batchSize, Dictionary agentInfo) + TensorProxy tensorProxy, int batchSize, IEnumerable agents) { TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator); var memorySize = tensorProxy.shape[tensorProxy.shape.Length - 1]; var agentIndex = 0; - foreach (var agent in agentInfo.Keys) + foreach (var agent in agents) { - var memory = agentInfo[agent].memories; + var info = agent.Info; + var memory = info.memories; if (memory == null) { agentIndex++; @@ -160,15 +161,16 @@ public BarracudaRecurrentInputGenerator(int memoryIndex, ITensorAllocator alloca } public void Generate( - TensorProxy tensorProxy, int batchSize, Dictionary agentInfo) + TensorProxy tensorProxy, int batchSize, IEnumerable agents) { TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator); var memorySize = (int)tensorProxy.shape[tensorProxy.shape.Length - 1]; var agentIndex = 0; - foreach (var agent in agentInfo.Keys) + foreach (var agent in agents) { - var memory = agentInfo[agent].memories; + var agentInfo = agent.Info; + var memory = agentInfo.memories; var offset = memorySize * m_MemoryIndex; @@ -206,15 +208,16 @@ public PreviousActionInputGenerator(ITensorAllocator allocator) } public void Generate( - TensorProxy tensorProxy, int batchSize, Dictionary agentInfo) + TensorProxy tensorProxy, int batchSize, IEnumerable agents) { TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator); var actionSize = tensorProxy.shape[tensorProxy.shape.Length - 1]; var agentIndex = 0; - foreach (var agent in agentInfo.Keys) + foreach (var agent in agents) { - var pastAction = agentInfo[agent].storedVectorActions; + var info = agent.Info; + var pastAction = info.storedVectorActions; for (var j = 0; j < actionSize; j++) { tensorProxy.data[agentIndex, j] = pastAction[j]; @@ -241,15 +244,16 
@@ public ActionMaskInputGenerator(ITensorAllocator allocator) } public void Generate( - TensorProxy tensorProxy, int batchSize, Dictionary agentInfo) + TensorProxy tensorProxy, int batchSize, IEnumerable agents) { TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator); var maskSize = tensorProxy.shape[tensorProxy.shape.Length - 1]; var agentIndex = 0; - foreach (var agent in agentInfo.Keys) + foreach (var agent in agents) { - var maskList = agentInfo[agent].actionMasks; + var agentInfo = agent.Info; + var maskList = agentInfo.actionMasks; for (var j = 0; j < maskSize; j++) { var isUnmasked = (maskList != null && maskList[j]) ? 0.0f : 1.0f; @@ -277,7 +281,7 @@ public RandomNormalInputGenerator(int seed, ITensorAllocator allocator) } public void Generate( - TensorProxy tensorProxy, int batchSize, Dictionary agentInfo) + TensorProxy tensorProxy, int batchSize, IEnumerable agents) { TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator); TensorUtils.FillTensorWithRandomNormal(tensorProxy, m_RandomNormal); @@ -297,21 +301,24 @@ public class VisualObservationInputGenerator : TensorGenerator.IGenerator private readonly ITensorAllocator m_Allocator; public VisualObservationInputGenerator( - int index, bool grayScale, ITensorAllocator allocator) + int index, ITensorAllocator allocator) { m_Index = index; - m_GrayScale = grayScale; m_Allocator = allocator; } public void Generate( - TensorProxy tensorProxy, int batchSize, Dictionary agentInfo) + TensorProxy tensorProxy, int batchSize, IEnumerable agents) { - var textures = agentInfo.Keys.Select( - agent => agentInfo[agent].visualObservations[m_Index]).ToList(); - TensorUtils.ResizeTensor(tensorProxy, batchSize, m_Allocator); - Utilities.TextureToTensorProxy(textures, tensorProxy, m_GrayScale); + var agentIndex = 0; + foreach (var agent in agents) + { + // TODO direct access to sensors list here - should we do it differently? + // TODO m_Index here is the visual observation index. 
Will work for now but not if we add more sensor types. + agent.m_Sensors[m_Index].WriteToTensor(tensorProxy, agentIndex); + agentIndex++; + } } } } diff --git a/UnitySDK/Assets/ML-Agents/Scripts/InferenceBrain/ModelRunner.cs b/UnitySDK/Assets/ML-Agents/Scripts/InferenceBrain/ModelRunner.cs new file mode 100644 index 0000000000..85301e9313 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/InferenceBrain/ModelRunner.cs @@ -0,0 +1,159 @@ +using System.Collections.Generic; +using Barracuda; +using UnityEngine.Profiling; + +namespace MLAgents.InferenceBrain +{ + public class ModelRunner : IBatchedDecisionMaker + { + private List m_Agents = new List(); + private ITensorAllocator m_TensorAllocator; + private TensorGenerator m_TensorGenerator; + private TensorApplier m_TensorApplier; + + private NNModel m_Model; + private InferenceDevice m_InferenceDevice; + private IWorker m_Engine; + private bool m_Verbose = false; + private string[] m_OutputNames; + private IReadOnlyList m_InferenceInputs; + private IReadOnlyList m_InferenceOutputs; + + private bool m_visualObservationsInitialized = false; + + /// + /// Initializes the Brain with the Model that it will use when selecting actions for + /// the agents + /// + /// The Barracuda model to load + /// The parameters of the Brain used to generate the + /// placeholder tensors + /// Inference execution device. CPU is the fastest + /// option for most of ML Agents models. + /// The seed that will be used to initialize the RandomNormal + /// and Multinomial objects used when running inference. 
+ /// Throws an error when the model is null + /// + public ModelRunner( + NNModel model, + BrainParameters brainParameters, + InferenceDevice inferenceDevice = InferenceDevice.CPU, + int seed = 0) + { + Model barracudaModel; + m_Model = model; + m_InferenceDevice = inferenceDevice; + m_TensorAllocator = new TensorCachingAllocator(); + if (model != null) + { +#if BARRACUDA_VERBOSE + m_Verbose = true; +#endif + + D.logEnabled = m_Verbose; + + barracudaModel = ModelLoader.Load(model.Value); + var executionDevice = inferenceDevice == InferenceDevice.GPU + ? BarracudaWorkerFactory.Type.ComputePrecompiled + : BarracudaWorkerFactory.Type.CSharp; + m_Engine = BarracudaWorkerFactory.CreateWorker(executionDevice, barracudaModel, m_Verbose); + } + else + { + barracudaModel = null; + m_Engine = null; + } + + m_InferenceInputs = BarracudaModelParamLoader.GetInputTensors(barracudaModel); + m_OutputNames = BarracudaModelParamLoader.GetOutputNames(barracudaModel); + m_TensorGenerator = new TensorGenerator(brainParameters, seed, m_TensorAllocator, barracudaModel); + m_TensorApplier = new TensorApplier(brainParameters, seed, m_TensorAllocator, barracudaModel); + } + + private static Dictionary PrepareBarracudaInputs(IEnumerable infInputs) + { + var inputs = new Dictionary(); + foreach (var inp in infInputs) + { + inputs[inp.name] = inp.data; + } + + return inputs; + } + + public void Dispose() + { + if (m_Engine != null) + m_Engine.Dispose(); + m_TensorAllocator?.Reset(false); + } + + private List FetchBarracudaOutputs(string[] names) + { + var outputs = new List(); + foreach (var n in names) + { + var output = m_Engine.Peek(n); + outputs.Add(TensorUtils.TensorProxyFromBarracuda(output, n)); + } + + return outputs; + } + + public void PutObservations(string key, Agent agent) + { + m_Agents.Add(agent); + } + public void DecideBatch() + { + var currentBatchSize = m_Agents.Count; + if (currentBatchSize == 0) + { + return; + } + + if (!m_visualObservationsInitialized) + { + // Just 
grab the first agent in the collection (any will suffice, really). + // We check for an empty Collection above, so this will always return successfully. + var firstAgent = m_Agents[0]; + m_TensorGenerator.InitializeVisualObservations(firstAgent, m_TensorAllocator); + m_visualObservationsInitialized = true; + } + + Profiler.BeginSample("LearningBrain.DecideAction"); + + Profiler.BeginSample($"MLAgents.{m_Model.name}.GenerateTensors"); + // Prepare the input tensors to be feed into the engine + m_TensorGenerator.GenerateTensors(m_InferenceInputs, currentBatchSize, m_Agents); + Profiler.EndSample(); + + Profiler.BeginSample($"MLAgents.{m_Model.name}.PrepareBarracudaInputs"); + var inputs = PrepareBarracudaInputs(m_InferenceInputs); + Profiler.EndSample(); + + // Execute the Model + Profiler.BeginSample($"MLAgents.{m_Model.name}.ExecuteGraph"); + m_Engine.Execute(inputs); + Profiler.EndSample(); + + Profiler.BeginSample($"MLAgents.{m_Model.name}.FetchBarracudaOutputs"); + m_InferenceOutputs = FetchBarracudaOutputs(m_OutputNames); + Profiler.EndSample(); + + Profiler.BeginSample($"MLAgents.{m_Model.name}.ApplyTensors"); + // Update the outputs + m_TensorApplier.ApplyTensors(m_InferenceOutputs, m_Agents); + Profiler.EndSample(); + + Profiler.EndSample(); + + m_Agents.Clear(); + } + + public bool HasModel(NNModel other, InferenceDevice otherInferenceDevice) + { + return m_Model == other && m_InferenceDevice == otherInferenceDevice; + } + } +} diff --git a/UnitySDK/Assets/ML-Agents/Scripts/InferenceBrain/ModelRunner.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/InferenceBrain/ModelRunner.cs.meta new file mode 100644 index 0000000000..e4e8e67539 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/InferenceBrain/ModelRunner.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 8f3f4b630ca3f4a4ba74922ec8249046 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + 
assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Scripts/InferenceBrain/TensorApplier.cs b/UnitySDK/Assets/ML-Agents/Scripts/InferenceBrain/TensorApplier.cs index 374c17d078..610abb6cf9 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/InferenceBrain/TensorApplier.cs +++ b/UnitySDK/Assets/ML-Agents/Scripts/InferenceBrain/TensorApplier.cs @@ -27,11 +27,10 @@ public interface IApplier /// /// The Tensor containing the data to be applied to the Agents /// - /// - /// Dictionary of Agents to AgentInfo that will receive - /// the values of the Tensor. + /// + /// List of Agents that will receive the values of the Tensor. /// - void Apply(TensorProxy tensorProxy, Dictionary agentInfo); + void Apply(TensorProxy tensorProxy, IEnumerable agents); } private readonly Dictionary m_Dict = new Dictionary(); @@ -75,12 +74,11 @@ public TensorApplier( /// Updates the state of the agents based on the data present in the tensor. /// /// Enumerable of tensors containing the data. - /// Dictionary of Agent to AgentInfo that contains the - /// Agents that will be updated using the tensor's data + /// List of Agents that will be updated using the tensor's data /// One of the tensor does not have an /// associated applier. 
public void ApplyTensors( - IEnumerable tensors, Dictionary agentInfos) + IEnumerable tensors, IEnumerable agents) { foreach (var tensor in tensors) { @@ -89,7 +87,7 @@ public void ApplyTensors( throw new UnityAgentsException( $"Unknown tensorProxy expected as output : {tensor.name}"); } - m_Dict[tensor.name].Apply(tensor, agentInfos); + m_Dict[tensor.name].Apply(tensor, agents); } } } diff --git a/UnitySDK/Assets/ML-Agents/Scripts/InferenceBrain/TensorGenerator.cs b/UnitySDK/Assets/ML-Agents/Scripts/InferenceBrain/TensorGenerator.cs index 56e8d2332d..452f15f92b 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/InferenceBrain/TensorGenerator.cs +++ b/UnitySDK/Assets/ML-Agents/Scripts/InferenceBrain/TensorGenerator.cs @@ -25,10 +25,10 @@ public interface IGenerator /// /// The tensor the data and shape will be modified /// The number of agents present in the current batch - /// Dictionary of Agent to AgentInfo containing the + /// List of Agents containing the /// information that will be used to populate the tensor's data void Generate( - TensorProxy tensorProxy, int batchSize, Dictionary agentInfo); + TensorProxy tensorProxy, int batchSize, IEnumerable agents); } private readonly Dictionary m_Dict = new Dictionary(); @@ -70,18 +70,7 @@ public TensorGenerator( new ActionMaskInputGenerator(allocator); m_Dict[TensorNames.RandomNormalEpsilonPlaceholder] = new RandomNormalInputGenerator(seed, allocator); - if (bp.cameraResolutions != null) - { - for (var visIndex = 0; - visIndex < bp.cameraResolutions.Length; - visIndex++) - { - var index = visIndex; - var bw = bp.cameraResolutions[visIndex].blackAndWhite; - m_Dict[TensorNames.VisualObservationPlaceholderPrefix + visIndex] = - new VisualObservationInputGenerator(index, bw, allocator); - } - } + // Generators for Outputs m_Dict[TensorNames.ActionOutput] = new BiDimensionalOutputGenerator(allocator); @@ -89,6 +78,16 @@ public TensorGenerator( m_Dict[TensorNames.ValueEstimateOutput] = new 
BiDimensionalOutputGenerator(allocator); } + public void InitializeVisualObservations(Agent agent, ITensorAllocator allocator) + { + for (var visIndex = 0; visIndex < agent.m_Sensors.Count; visIndex++) + { + // TODO handle non-visual sensors too - need to index better + m_Dict[TensorNames.VisualObservationPlaceholderPrefix + visIndex] = + new VisualObservationInputGenerator(visIndex, allocator); + } + } + /// /// Populates the data of the tensor inputs given the data contained in the current batch /// of agents. @@ -96,14 +95,14 @@ public TensorGenerator( /// Enumerable of tensors that will be modified. /// The number of agents present in the current batch /// - /// Dictionary of Agent to AgentInfo that contains the + /// List of Agents that contains the /// data that will be used to modify the tensors /// One of the tensor does not have an /// associated generator. public void GenerateTensors( IEnumerable tensors, int currentBatchSize, - Dictionary agentInfos) + IEnumerable agents) { foreach (var tensor in tensors) { @@ -112,7 +111,7 @@ public void GenerateTensors( throw new UnityAgentsException( $"Unknown tensorProxy expected as input : {tensor.name}"); } - m_Dict[tensor.name].Generate(tensor, currentBatchSize, agentInfos); + m_Dict[tensor.name].Generate(tensor, currentBatchSize, agents); } } } diff --git a/UnitySDK/Assets/ML-Agents/Scripts/LearningBrain.cs b/UnitySDK/Assets/ML-Agents/Scripts/LearningBrain.cs deleted file mode 100644 index d45b1d890d..0000000000 --- a/UnitySDK/Assets/ML-Agents/Scripts/LearningBrain.cs +++ /dev/null @@ -1,203 +0,0 @@ -using System; -using System.Collections.Generic; -using UnityEngine; -using System.Linq; -using Barracuda; -using MLAgents.InferenceBrain; -using UnityEngine.Profiling; - -namespace MLAgents -{ - public enum InferenceDevice - { - CPU = 0, - GPU = 1 - } - - /// - /// The Learning Brain works differently if you are training it or not. 
- /// When training your Agents, drag the Learning Brain to the Academy's BroadcastHub and check - /// the checkbox Control. When using a pretrained model, just drag the Model file into the - /// Model property of the Learning Brain. - /// The property model corresponds to the Model currently attached to the Brain. Before - /// being used, a call to ReloadModel is required. - /// When the Learning Brain is not training, it uses a TensorFlow model to make decisions. - /// The Proximal Policy Optimization (PPO) and Behavioral Cloning algorithms included with - /// the ML-Agents SDK produce trained TensorFlow models that you can use with the - /// Learning Brain. - /// - [CreateAssetMenu(fileName = "NewLearningBrain", menuName = "ML-Agents/Learning Brain")] - public class LearningBrain : Brain - { - private ITensorAllocator m_TensorAllocator; - private TensorGenerator m_TensorGenerator; - private TensorApplier m_TensorApplier; - public NNModel model; - private Model m_BarracudaModel; - private IWorker m_Engine; - private bool m_Verbose = false; - - private BarracudaModelParamLoader m_ModelParamLoader; - private string[] m_OutputNames; - - [Tooltip("Inference execution device. CPU is the fastest option for most of ML Agents models. " + - "(This field is not applicable for training).")] - public InferenceDevice inferenceDevice = InferenceDevice.CPU; - - private IReadOnlyList m_InferenceInputs; - private IReadOnlyList m_InferenceOutputs; - - [NonSerialized] - private bool m_IsControlled; - - /// - /// When Called, the brain will be controlled externally. It will not use the - /// model to decide on actions. 
- /// - public void SetToControlledExternally() - { - m_IsControlled = true; - } - - /// - protected override void Initialize() - { - ReloadModel(); - } - - /// - /// Initializes the Brain with the Model that it will use when selecting actions for - /// the agents - /// - /// The seed that will be used to initialize the RandomNormal - /// and Multinomial obsjects used when running inference. - /// Throws an error when the model is null - /// - public void ReloadModel(int seed = 0) - { - if (m_TensorAllocator == null) - m_TensorAllocator = new TensorCachingAllocator(); - - if (model != null) - { -#if BARRACUDA_VERBOSE - _verbose = true; -#endif - - D.logEnabled = m_Verbose; - - // Cleanup previous instance - if (m_Engine != null) - m_Engine.Dispose(); - - m_BarracudaModel = ModelLoader.Load(model.Value); - var executionDevice = inferenceDevice == InferenceDevice.GPU - ? BarracudaWorkerFactory.Type.ComputePrecompiled - : BarracudaWorkerFactory.Type.CSharp; - - m_Engine = BarracudaWorkerFactory.CreateWorker(executionDevice, m_BarracudaModel, m_Verbose); - } - else - { - m_BarracudaModel = null; - m_Engine = null; - } - - m_ModelParamLoader = BarracudaModelParamLoader.GetLoaderAndCheck(m_Engine, m_BarracudaModel, brainParameters); - m_InferenceInputs = m_ModelParamLoader.GetInputTensors(); - m_OutputNames = m_ModelParamLoader.GetOutputNames(); - m_TensorGenerator = new TensorGenerator(brainParameters, seed, m_TensorAllocator, m_BarracudaModel); - m_TensorApplier = new TensorApplier(brainParameters, seed, m_TensorAllocator, m_BarracudaModel); - } - - /// - /// Return a list of failed checks corresponding to the failed compatibility checks - /// between the Model and the BrainParameters. Note : This does not reload the model. - /// If changes have been made to the BrainParameters or the Model, the model must be - /// reloaded using GiveModel before trying to get the compatibility checks. 
- /// - /// The list of the failed compatibility checks between the Model and the - /// Brain Parameters - public IEnumerable GetModelFailedChecks() - { - return (m_ModelParamLoader != null) ? m_ModelParamLoader.GetChecks() : new List(); - } - - /// - protected override void DecideAction() - { - if (m_IsControlled) - { - m_AgentInfos.Clear(); - return; - } - var currentBatchSize = m_AgentInfos.Count(); - if (currentBatchSize == 0) - { - return; - } - - Profiler.BeginSample("LearningBrain.DecideAction"); - if (m_Engine == null) - { - Debug.LogError($"No model was present for the Brain {name}."); - return; - } - - Profiler.BeginSample($"MLAgents.{name}.GenerateTensors"); - // Prepare the input tensors to be feed into the engine - m_TensorGenerator.GenerateTensors(m_InferenceInputs, currentBatchSize, m_AgentInfos); - Profiler.EndSample(); - - Profiler.BeginSample($"MLAgents.{name}.PrepareBarracudaInputs"); - var inputs = PrepareBarracudaInputs(m_InferenceInputs); - Profiler.EndSample(); - - // Execute the Model - Profiler.BeginSample($"MLAgents.{name}.ExecuteGraph"); - m_Engine.Execute(inputs); - Profiler.EndSample(); - - Profiler.BeginSample($"MLAgents.{name}.FetchBarracudaOutputs"); - m_InferenceOutputs = FetchBarracudaOutputs(m_OutputNames); - Profiler.EndSample(); - - Profiler.BeginSample($"MLAgents.{name}.ApplyTensors"); - // Update the outputs - m_TensorApplier.ApplyTensors(m_InferenceOutputs, m_AgentInfos); - Profiler.EndSample(); - - m_AgentInfos.Clear(); - Profiler.EndSample(); - } - - protected Dictionary PrepareBarracudaInputs(IEnumerable infInputs) - { - var inputs = new Dictionary(); - foreach (var inp in m_InferenceInputs) - { - inputs[inp.name] = inp.data; - } - - return inputs; - } - - protected List FetchBarracudaOutputs(string[] names) - { - var outputs = new List(); - foreach (var n in names) - { - var output = m_Engine.Peek(n); - outputs.Add(TensorUtils.TensorProxyFromBarracuda(output, n)); - } - - return outputs; - } - - public void OnDisable() - 
{ - m_Engine?.Dispose(); - m_TensorAllocator?.Reset(false); - } - } -} diff --git a/UnitySDK/Assets/ML-Agents/Scripts/LearningBrain.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/LearningBrain.cs.meta deleted file mode 100644 index cd924839b8..0000000000 --- a/UnitySDK/Assets/ML-Agents/Scripts/LearningBrain.cs.meta +++ /dev/null @@ -1,11 +0,0 @@ -fileFormatVersion: 2 -guid: 8b23992c8eb17439887f5e944bf04a40 -MonoImporter: - externalObjects: {} - serializedVersion: 2 - defaultReferences: [] - executionOrder: 0 - icon: {instanceID: 0} - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Scripts/PlayerBrain.cs b/UnitySDK/Assets/ML-Agents/Scripts/PlayerBrain.cs deleted file mode 100644 index fe07a7573c..0000000000 --- a/UnitySDK/Assets/ML-Agents/Scripts/PlayerBrain.cs +++ /dev/null @@ -1,105 +0,0 @@ -using UnityEngine; -using UnityEngine.Serialization; - -namespace MLAgents -{ - /// - /// Implemetation of the Player Brain. Inherits from the base class Brain. Allows the user to - /// manually select decisions for linked agents by creating a mapping from keys presses to - /// actions. - /// You can use Player Brains to control a "teacher" Agent that trains other Agents during - /// imitation learning. You can also use Player Brains to test your Agents and environment - /// before training agents with reinforcement learning. 
- /// - [CreateAssetMenu(fileName = "NewPlayerBrain", menuName = "ML-Agents/Player Brain")] - public class PlayerBrain : Brain - { - [System.Serializable] - public struct DiscretePlayerAction - { - public KeyCode key; - public int branchIndex; - public int value; - } - - [System.Serializable] - public struct KeyContinuousPlayerAction - { - public KeyCode key; - public int index; - public float value; - } - - [System.Serializable] - public struct AxisContinuousPlayerAction - { - public string axis; - public int index; - public float scale; - } - - /// Contains the mapping from input to continuous actions - [SerializeField] - [FormerlySerializedAs("continuousPlayerActions")] - [Tooltip("The list of keys and the value they correspond to for continuous control.")] - public KeyContinuousPlayerAction[] keyContinuousPlayerActions; - - /// Contains the mapping from input to continuous actions - [SerializeField] - [Tooltip("The list of axis actions.")] - public AxisContinuousPlayerAction[] axisContinuousPlayerActions; - - /// Contains the mapping from input to discrete actions - [SerializeField] - [Tooltip("The list of keys and the value they correspond to for discrete control.")] - public DiscretePlayerAction[] discretePlayerActions; - - protected override void Initialize() {} - - /// Uses the continuous inputs or dicrete inputs of the player to - /// decide action - protected override void DecideAction() - { - if (brainParameters.vectorActionSpaceType == SpaceType.Continuous) - { - foreach (var agent in m_AgentInfos.Keys) - { - var action = new float[brainParameters.vectorActionSize[0]]; - foreach (var cha in keyContinuousPlayerActions) - { - if (Input.GetKey(cha.key)) - { - action[cha.index] = cha.value; - } - } - foreach (var axisAction in axisContinuousPlayerActions) - { - var axisValue = Input.GetAxis(axisAction.axis); - axisValue *= axisAction.scale; - if (Mathf.Abs(axisValue) > 0.0001) - { - action[axisAction.index] = axisValue; - } - } - 
agent.UpdateVectorAction(action); - } - } - else - { - foreach (var agent in m_AgentInfos.Keys) - { - var action = new float[brainParameters.vectorActionSize.Length]; - foreach (var dha in discretePlayerActions) - { - if (Input.GetKey(dha.key)) - { - action[dha.branchIndex] = dha.value; - } - } - agent.UpdateVectorAction(action); - } - } - m_AgentInfos.Clear(); - } - } -} diff --git a/UnitySDK/Assets/ML-Agents/Scripts/PlayerBrain.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/PlayerBrain.cs.meta deleted file mode 100755 index 6094287af6..0000000000 --- a/UnitySDK/Assets/ML-Agents/Scripts/PlayerBrain.cs.meta +++ /dev/null @@ -1,12 +0,0 @@ -fileFormatVersion: 2 -guid: 41e9bda8f3cf1492fa74926a530f6f70 -timeCreated: 1504070375 -licenseType: Free -MonoImporter: - serializedVersion: 2 - defaultReferences: [] - executionOrder: 0 - icon: {instanceID: 0} - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/Gizmos.meta b/UnitySDK/Assets/ML-Agents/Scripts/Policy.meta similarity index 77% rename from UnitySDK/Assets/Gizmos.meta rename to UnitySDK/Assets/ML-Agents/Scripts/Policy.meta index 5d0f14a77d..7ef40d7aff 100644 --- a/UnitySDK/Assets/Gizmos.meta +++ b/UnitySDK/Assets/ML-Agents/Scripts/Policy.meta @@ -1,5 +1,5 @@ fileFormatVersion: 2 -guid: e74b9e5c364014df1a24c696734cc461 +guid: 9a2c172aaf37944daa816ee2419c043a folderAsset: yes DefaultImporter: externalObjects: {} diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Policy/BarracudaPolicy.cs b/UnitySDK/Assets/ML-Agents/Scripts/Policy/BarracudaPolicy.cs new file mode 100644 index 0000000000..65d66f4239 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Policy/BarracudaPolicy.cs @@ -0,0 +1,93 @@ +using UnityEngine; +using Barracuda; +using System.Collections.Generic; + +namespace MLAgents +{ + public enum InferenceDevice + { + CPU = 0, + GPU = 1 + } + + /// + /// The Barracuda Policy uses a Barracuda Model to make decisions at + /// every step. 
It uses a ModelRunner that is shared accross all + /// Barracuda Policies that use the same model and inference devices. + /// + public class BarracudaPolicy : IPolicy + { + + protected IBatchedDecisionMaker m_BatchedDecisionMaker; + + /// + /// Sensor shapes for the associated Agents. All Agents must have the same shapes for their sensors. + /// + List m_SensorShapes; + + /// + public BarracudaPolicy( + BrainParameters brainParameters, + NNModel model, + InferenceDevice inferenceDevice) + { + var aca = GameObject.FindObjectOfType(); + aca.LazyInitialization(); + var modelRunner = aca.GetOrCreateModelRunner(model, brainParameters, inferenceDevice); + m_BatchedDecisionMaker = modelRunner; + } + + /// + public void RequestDecision(Agent agent) + { +#if DEBUG + ValidateAgentSensorShapes(agent); +#endif + m_BatchedDecisionMaker?.PutObservations(null, agent); + } + + /// + public void DecideAction() + { + m_BatchedDecisionMaker?.DecideBatch(); + } + + /// + /// Check that the Agent sensors are the same shape as the the other Agents using the same Brain. + /// If this is the first Agent being checked, its Sensor sizes will be saved. + /// + /// The Agent to check + private void ValidateAgentSensorShapes(Agent agent) + { + if (m_SensorShapes == null) + { + m_SensorShapes = new List(agent.m_Sensors.Count); + // First agent, save the sensor sizes + foreach (var sensor in agent.m_Sensors) + { + m_SensorShapes.Add(sensor.GetFloatObservationShape()); + } + } + else + { + // Check for compatibility with the other Agents' sensors + // TODO make sure this only checks once per agent + Debug.Assert(m_SensorShapes.Count == agent.m_Sensors.Count, $"Number of sensors must match. 
{m_SensorShapes.Count} != {agent.m_Sensors.Count}"); + for (var i = 0; i < m_SensorShapes.Count; i++) + { + var cachedShape = m_SensorShapes[i]; + var sensorShape = agent.m_Sensors[i].GetFloatObservationShape(); + Debug.Assert(cachedShape.Length == sensorShape.Length, "Sensor dimensions must match."); + for (var j = 0; j < cachedShape.Length; j++) + { + Debug.Assert(cachedShape[j] == sensorShape[j], "Sensor sizes much match."); + } + } + } + } + + public void Dispose() + { + } + } +} diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Policy/BarracudaPolicy.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/Policy/BarracudaPolicy.cs.meta new file mode 100644 index 0000000000..014a05302d --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Policy/BarracudaPolicy.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 8eb047b11855142d2be2cc458bef3264 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Policy/BehaviorParameters.cs b/UnitySDK/Assets/ML-Agents/Scripts/Policy/BehaviorParameters.cs new file mode 100644 index 0000000000..d29ad4b887 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Policy/BehaviorParameters.cs @@ -0,0 +1,64 @@ +using Barracuda; +using System; +using UnityEngine; + +namespace MLAgents +{ + + /// + /// The Factory to generate policies. 
+ /// + public class BehaviorParameters : MonoBehaviour + { + + [HideInInspector] + [SerializeField] + private BrainParameters m_BrainParameters = new BrainParameters(); + [HideInInspector] [SerializeField] private NNModel m_Model; + [HideInInspector] [SerializeField] private InferenceDevice m_InferenceDevice; + [HideInInspector] [SerializeField] private bool m_UseHeuristic; + [HideInInspector] [SerializeField] private string m_BehaviorName = "My Behavior"; + + [HideInInspector] + public BrainParameters brainParameters + { + get { return m_BrainParameters; } + } + + [HideInInspector] + public string behaviorName + { + get { return m_BehaviorName; } + } + + public IPolicy GeneratePolicy(Func heuristic) + { + if (m_UseHeuristic) + { + return new HeuristicPolicy(heuristic); + } + if (FindObjectOfType().IsCommunicatorOn) + { + return new RemotePolicy(m_BrainParameters, m_BehaviorName); + } + if (m_Model != null) + { + return new BarracudaPolicy(m_BrainParameters, m_Model, m_InferenceDevice); + } + else + { + return new HeuristicPolicy(heuristic); + } + } + + public void GiveModel( + string behaviorName, + NNModel model, + InferenceDevice inferenceDevice = InferenceDevice.CPU) + { + m_Model = model; + m_InferenceDevice = inferenceDevice; + m_BehaviorName = behaviorName; + } + } +} diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Policy/BehaviorParameters.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/Policy/BehaviorParameters.cs.meta new file mode 100644 index 0000000000..507c417e97 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Policy/BehaviorParameters.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 5d1c4e0b1822b495aa52bc52839ecb30 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Policy/BrainParameters.cs b/UnitySDK/Assets/ML-Agents/Scripts/Policy/BrainParameters.cs new file mode 
100644 index 0000000000..e20ba3e6c1 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Policy/BrainParameters.cs @@ -0,0 +1,57 @@ +using System; +using UnityEngine; + +namespace MLAgents +{ + public enum SpaceType + { + Discrete, + Continuous + } + + /// + /// Holds information about the Brain. It defines what are the inputs and outputs of the + /// decision process. + /// + [Serializable] + public class BrainParameters + { + /// + /// If continuous : The length of the float vector that represents + /// the state + /// If discrete : The number of possible values the state can take + /// + public int vectorObservationSize = 1; + + [Range(1, 50)] public int numStackedVectorObservations = 1; + + /// + /// If continuous : The length of the float vector that represents + /// the action + /// If discrete : The number of possible values the action can take*/ + /// + public int[] vectorActionSize = new[] {1}; + + /// The list of strings describing what the actions correpond to */ + public string[] vectorActionDescriptions; + + /// Defines if the action is discrete or continuous + public SpaceType vectorActionSpaceType = SpaceType.Discrete; + + /// + /// Deep clones the BrainParameter object + /// + /// A new BrainParameter object with the same values as the original. 
+ public BrainParameters Clone() + { + return new BrainParameters + { + vectorObservationSize = vectorObservationSize, + numStackedVectorObservations = numStackedVectorObservations, + vectorActionSize = (int[])vectorActionSize.Clone(), + vectorActionDescriptions = (string[])vectorActionDescriptions.Clone(), + vectorActionSpaceType = vectorActionSpaceType + }; + } + } +} diff --git a/UnitySDK/Assets/ML-Agents/Scripts/BrainParameters.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/Policy/BrainParameters.cs.meta similarity index 100% rename from UnitySDK/Assets/ML-Agents/Scripts/BrainParameters.cs.meta rename to UnitySDK/Assets/ML-Agents/Scripts/Policy/BrainParameters.cs.meta diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Policy/HeuristicPolicy.cs b/UnitySDK/Assets/ML-Agents/Scripts/Policy/HeuristicPolicy.cs new file mode 100644 index 0000000000..03f98e9cd4 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Policy/HeuristicPolicy.cs @@ -0,0 +1,45 @@ +using UnityEngine; +using Barracuda; +using MLAgents.InferenceBrain; +using System; + +namespace MLAgents +{ + + /// + /// The Heuristic Policy uses a hards coded Heuristic method + /// to take decisions each time the RequestDecision method is + /// called. 
+ /// + public class HeuristicPolicy : IPolicy + { + private Func m_Heuristic; + private Agent m_Agent; + + /// + public HeuristicPolicy(Func heuristic) + { + m_Heuristic = heuristic; + } + + /// + public void RequestDecision(Agent agent) + { + m_Agent = agent; + } + + /// + public void DecideAction() + { + if (m_Agent != null) + { + m_Agent.UpdateVectorAction(m_Heuristic.Invoke()); + } + } + + public void Dispose() + { + + } + } +} diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Policy/HeuristicPolicy.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/Policy/HeuristicPolicy.cs.meta new file mode 100644 index 0000000000..ae074f5727 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Policy/HeuristicPolicy.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 8a55e3cea7fd643109e42f5f4c9a1425 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Policy/IPolicy.cs b/UnitySDK/Assets/ML-Agents/Scripts/Policy/IPolicy.cs new file mode 100644 index 0000000000..35c5bc13c0 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Policy/IPolicy.cs @@ -0,0 +1,30 @@ +using System; +using System.Collections.Generic; +using UnityEngine; + +namespace MLAgents +{ + /// + /// IPolicy is connected to a single Agent. Each time the agent needs + /// a decision, it will request a decision to the Policy. The decision + /// will not be taken immediately but will be taken before or when + /// DecideAction is called. + /// + public interface IPolicy : IDisposable + { + /// + /// Signals the Brain that the Agent needs a Decision. The Policy + /// will make the decision at a later time to allow possible + /// batching of requests. + /// + /// + void RequestDecision(Agent agent); + + /// + /// Signals the Policy that if the Decision has not been taken yet, + /// it must be taken now. 
The Brain is expected to update the actions + /// of the Agents at this point the latest. + /// + void DecideAction(); + } +} diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Policy/IPolicy.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/Policy/IPolicy.cs.meta new file mode 100644 index 0000000000..f43c4ddc8b --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Policy/IPolicy.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 56e263dd566be41d6b81d0b46895a0dd +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Policy/RemotePolicy.cs b/UnitySDK/Assets/ML-Agents/Scripts/Policy/RemotePolicy.cs new file mode 100644 index 0000000000..6ad30fde75 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Policy/RemotePolicy.cs @@ -0,0 +1,86 @@ +using UnityEngine; +using System.Collections.Generic; + +namespace MLAgents +{ + /// + /// The Remote Policy only works when training. + /// When training your Agents, the RemotePolicy will be controlled by Python. + /// + public class RemotePolicy : IPolicy + { + + private string m_BehaviorName; + protected IBatchedDecisionMaker m_BatchedDecisionMaker; + + /// + /// Sensor shapes for the associated Agents. All Agents must have the same shapes for their sensors. 
+ /// + List m_SensorShapes; + + /// + public RemotePolicy( + BrainParameters brainParameters, + string behaviorName) + { + m_BehaviorName = behaviorName; + var aca = GameObject.FindObjectOfType(); + aca.LazyInitialization(); + m_BatchedDecisionMaker = aca.Communicator; + aca.Communicator.SubscribeBrain(m_BehaviorName, brainParameters); + } + + /// + public void RequestDecision(Agent agent) + { +#if DEBUG + ValidateAgentSensorShapes(agent); +#endif + m_BatchedDecisionMaker?.PutObservations(m_BehaviorName, agent); + } + + /// + public void DecideAction() + { + m_BatchedDecisionMaker?.DecideBatch(); + } + + /// + /// Check that the Agent sensors are the same shape as the the other Agents using the same Brain. + /// If this is the first Agent being checked, its Sensor sizes will be saved. + /// + /// The Agent to check + private void ValidateAgentSensorShapes(Agent agent) + { + if (m_SensorShapes == null) + { + m_SensorShapes = new List(agent.m_Sensors.Count); + // First agent, save the sensor sizes + foreach (var sensor in agent.m_Sensors) + { + m_SensorShapes.Add(sensor.GetFloatObservationShape()); + } + } + else + { + // Check for compatibility with the other Agents' sensors + // TODO make sure this only checks once per agent + Debug.Assert(m_SensorShapes.Count == agent.m_Sensors.Count, $"Number of sensors must match. 
{m_SensorShapes.Count} != {agent.m_Sensors.Count}"); + for (var i = 0; i < m_SensorShapes.Count; i++) + { + var cachedShape = m_SensorShapes[i]; + var sensorShape = agent.m_Sensors[i].GetFloatObservationShape(); + Debug.Assert(cachedShape.Length == sensorShape.Length, "Sensor dimensions must match."); + for (var j = 0; j < cachedShape.Length; j++) + { + Debug.Assert(cachedShape[j] == sensorShape[j], "Sensor sizes much match."); + } + } + } + } + + public void Dispose() + { + } + } +} diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Policy/RemotePolicy.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/Policy/RemotePolicy.cs.meta new file mode 100644 index 0000000000..08996fa8a2 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Policy/RemotePolicy.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 2f1ffc0e0bec14a1eaca4c709b3ba230 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Scripts/ResetParameters.cs b/UnitySDK/Assets/ML-Agents/Scripts/ResetParameters.cs index c8dec9d041..19ecb2c456 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/ResetParameters.cs +++ b/UnitySDK/Assets/ML-Agents/Scripts/ResetParameters.cs @@ -15,23 +15,30 @@ public struct ResetParameter public float value; } - [FormerlySerializedAs("resetParameters")] - [SerializeField] private List m_ResetParameters = new List(); + public ResetParameters() {} - public void OnBeforeSerialize() + public ResetParameters(IDictionary dict) : base(dict) { - m_ResetParameters.Clear(); + UpdateResetParameters(); + } + private void UpdateResetParameters() + { + m_ResetParameters.Clear(); foreach (var pair in this) { - var rp = new ResetParameter(); - rp.key = pair.Key; - - rp.value = pair.Value; - m_ResetParameters.Add(rp); + m_ResetParameters.Add(new ResetParameter { key = pair.Key, value = pair.Value }); } } + [FormerlySerializedAs("resetParameters")] 
+ [SerializeField] private List m_ResetParameters = new List(); + + public void OnBeforeSerialize() + { + UpdateResetParameters(); + } + public void OnAfterDeserialize() { Clear(); diff --git a/UnitySDK/Assets/ML-Agents/Scripts/RpcCommunicator.cs b/UnitySDK/Assets/ML-Agents/Scripts/RpcCommunicator.cs deleted file mode 100644 index 7adda502d3..0000000000 --- a/UnitySDK/Assets/ML-Agents/Scripts/RpcCommunicator.cs +++ /dev/null @@ -1,183 +0,0 @@ -# if UNITY_EDITOR || UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX || UNITY_STANDALONE_LINUX -using Grpc.Core; -#endif -#if UNITY_EDITOR -using UnityEditor; -#endif -using UnityEngine; -using MLAgents.CommunicatorObjects; - -namespace MLAgents -{ - /// Responsible for communication with External using gRPC. - public class RpcCommunicator : ICommunicator - { - /// If true, the communication is active. - bool m_IsOpen; - -# if UNITY_EDITOR || UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX || UNITY_STANDALONE_LINUX - /// The Unity to External client. - UnityToExternal.UnityToExternalClient m_Client; -#endif - /// The communicator parameters sent at construction - CommunicatorParameters m_CommunicatorParameters; - - /// - /// Initializes a new instance of the RPCCommunicator class. - /// - /// Communicator parameters. - public RpcCommunicator(CommunicatorParameters communicatorParameters) - { - m_CommunicatorParameters = communicatorParameters; - } - - /// - /// Initialize the communicator by sending the first UnityOutput and receiving the - /// first UnityInput. The second UnityInput is stored in the unityInput argument. - /// - /// The first Unity Input. - /// The first Unity Output. - /// The second Unity input. 
- public UnityInput Initialize(UnityOutput unityOutput, - out UnityInput unityInput) - { -# if UNITY_EDITOR || UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX || UNITY_STANDALONE_LINUX - m_IsOpen = true; - var channel = new Channel( - "localhost:" + m_CommunicatorParameters.port, - ChannelCredentials.Insecure); - - m_Client = new UnityToExternal.UnityToExternalClient(channel); - var result = m_Client.Exchange(WrapMessage(unityOutput, 200)); - unityInput = m_Client.Exchange(WrapMessage(null, 200)).UnityInput; -#if UNITY_EDITOR -#if UNITY_2017_2_OR_NEWER - EditorApplication.playModeStateChanged += HandleOnPlayModeChanged; -#else - EditorApplication.playmodeStateChanged += HandleOnPlayModeChanged; -#endif -#endif - return result.UnityInput; -#else - throw new UnityAgentsException( - "You cannot perform training on this platform."); -#endif - } - - /// - /// Close the communicator gracefully on both sides of the communication. - /// - public void Close() - { -# if UNITY_EDITOR || UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX || UNITY_STANDALONE_LINUX - if (!m_IsOpen) - { - return; - } - - try - { - m_Client.Exchange(WrapMessage(null, 400)); - m_IsOpen = false; - } - catch - { - // ignored - } -#else - throw new UnityAgentsException( - "You cannot perform training on this platform."); -#endif - } - - /// - /// Send a UnityOutput and receives a UnityInput. - /// - /// The next UnityInput. - /// The UnityOutput to be sent. 
- public UnityInput Exchange(UnityOutput unityOutput) - { -# if UNITY_EDITOR || UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX || UNITY_STANDALONE_LINUX - if (!m_IsOpen) - { - return null; - } - try - { - var message = m_Client.Exchange(WrapMessage(unityOutput, 200)); - if (message.Header.Status == 200) - { - return message.UnityInput; - } - else - { - m_IsOpen = false; - return null; - } - } - catch - { - m_IsOpen = false; - return null; - } -#else - throw new UnityAgentsException( - "You cannot perform training on this platform."); -#endif - } - - /// - /// Wraps the UnityOuptut into a message with the appropriate status. - /// - /// The UnityMessage corresponding. - /// The UnityOutput to be wrapped. - /// The status of the message. - private static UnityMessage WrapMessage(UnityOutput content, int status) - { - return new UnityMessage - { - Header = new Header { Status = status }, - UnityOutput = content - }; - } - - /// - /// When the Unity application quits, the communicator must be closed - /// - private void OnApplicationQuit() - { - Close(); - } - -#if UNITY_EDITOR -#if UNITY_2017_2_OR_NEWER - /// - /// When the editor exits, the communicator must be closed - /// - /// State. - private void HandleOnPlayModeChanged(PlayModeStateChange state) - { - // This method is run whenever the playmode state is changed. - if (state == PlayModeStateChange.ExitingPlayMode) - { - Close(); - } - } - -#else - /// - /// When the editor exits, the communicator must be closed - /// - private void HandleOnPlayModeChanged() - { - // This method is run whenever the playmode state is changed. 
- if (!EditorApplication.isPlayingOrWillChangePlaymode) - { - Close(); - } - } - -#endif -#endif - } -} diff --git a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Brains.meta b/UnitySDK/Assets/ML-Agents/Scripts/Sensor.meta similarity index 77% rename from UnitySDK/Assets/ML-Agents/Examples/3DBall/Brains.meta rename to UnitySDK/Assets/ML-Agents/Scripts/Sensor.meta index 0cc8849fee..6237993f1c 100644 --- a/UnitySDK/Assets/ML-Agents/Examples/3DBall/Brains.meta +++ b/UnitySDK/Assets/ML-Agents/Scripts/Sensor.meta @@ -1,5 +1,5 @@ fileFormatVersion: 2 -guid: 70626ce713f4a428e9b72ab06365ec0d +guid: 96db1714def024608a0ff58ed9eae82d folderAsset: yes DefaultImporter: externalObjects: {} diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Sensor/CameraSensor.cs b/UnitySDK/Assets/ML-Agents/Scripts/Sensor/CameraSensor.cs new file mode 100644 index 0000000000..574f226a81 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Sensor/CameraSensor.cs @@ -0,0 +1,101 @@ +using System; +using MLAgents.InferenceBrain; +using UnityEngine; + +namespace MLAgents.Sensor +{ + public class CameraSensor : ISensor + { + private Camera m_Camera; + private int m_Width; + private int m_Height; + private bool m_Grayscale; + private string m_Name; + private int[] m_Shape; + + public CameraSensor(Camera camera, int width, int height, bool grayscale, string name) + { + m_Camera = camera; + m_Width = width; + m_Height = height; + m_Grayscale = grayscale; + m_Name = name; + m_Shape = new[] { width, height, grayscale ? 1 : 3 }; + } + + public string GetName() + { + return m_Name; + } + + public int[] GetFloatObservationShape() + { + return m_Shape; + } + + public byte[] GetCompressedObservation() + { + using (TimerStack.Instance.Scoped("CameraSensor.GetCompressedObservation")) + { + var texture = ObservationToTexture(m_Camera, m_Width, m_Height); + // TODO support more types here, e.g. 
JPG + var compressed = texture.EncodeToPNG(); + UnityEngine.Object.Destroy(texture); + return compressed; + } + } + + public void WriteToTensor(TensorProxy tensorProxy, int agentIndex) + { + using (TimerStack.Instance.Scoped("CameraSensor.WriteToTensor")) + { + var texture = ObservationToTexture(m_Camera, m_Width, m_Height); + Utilities.TextureToTensorProxy(texture, tensorProxy, m_Grayscale, agentIndex); + UnityEngine.Object.Destroy(texture); + } + } + + public SensorCompressionType GetCompressionType() + { + return SensorCompressionType.PNG; + } + + /// + /// Converts a m_Camera and corresponding resolution to a 2D texture. + /// + /// The 2D texture. + /// Camera. + /// Width of resulting 2D texture. + /// Height of resulting 2D texture. + /// Texture2D to render to. + public static Texture2D ObservationToTexture(Camera obsCamera, int width, int height) + { + var texture2D = new Texture2D(width, height, TextureFormat.RGB24, false); + var oldRec = obsCamera.rect; + obsCamera.rect = new Rect(0f, 0f, 1f, 1f); + var depth = 24; + var format = RenderTextureFormat.Default; + var readWrite = RenderTextureReadWrite.Default; + + var tempRt = + RenderTexture.GetTemporary(width, height, depth, format, readWrite); + + var prevActiveRt = RenderTexture.active; + var prevCameraRt = obsCamera.targetTexture; + + // render to offscreen texture (readonly from CPU side) + RenderTexture.active = tempRt; + obsCamera.targetTexture = tempRt; + + obsCamera.Render(); + + texture2D.ReadPixels(new Rect(0, 0, texture2D.width, texture2D.height), 0, 0); + + obsCamera.targetTexture = prevCameraRt; + obsCamera.rect = oldRec; + RenderTexture.active = prevActiveRt; + RenderTexture.ReleaseTemporary(tempRt); + return texture2D; + } + } +} diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Sensor/CameraSensor.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/Sensor/CameraSensor.cs.meta new file mode 100644 index 0000000000..1a0314b8f7 --- /dev/null +++ 
b/UnitySDK/Assets/ML-Agents/Scripts/Sensor/CameraSensor.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 41cb6bf4b09974bf583f5b2fef0c08a7 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Sensor/CameraSensorComponent.cs b/UnitySDK/Assets/ML-Agents/Scripts/Sensor/CameraSensorComponent.cs new file mode 100644 index 0000000000..5afdb8d159 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Sensor/CameraSensorComponent.cs @@ -0,0 +1,19 @@ +using System; +using UnityEngine; + +namespace MLAgents.Sensor +{ + public class CameraSensorComponent : SensorComponent + { + public new Camera camera; + public string sensorName = "CameraSensor"; + public int width = 84; + public int height = 84; + public bool grayscale = false; + + public override ISensor CreateSensor() + { + return new CameraSensor(camera, width, height, grayscale, sensorName); + } + } +} diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Sensor/CameraSensorComponent.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/Sensor/CameraSensorComponent.cs.meta new file mode 100644 index 0000000000..307dc64952 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Sensor/CameraSensorComponent.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 282f342c2ab144bf38be65d4d0c4e07d +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Sensor/CompressedObservation.cs b/UnitySDK/Assets/ML-Agents/Scripts/Sensor/CompressedObservation.cs new file mode 100644 index 0000000000..5dfe6f85bf --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Sensor/CompressedObservation.cs @@ -0,0 +1,24 @@ +using System; +using UnityEngine; + +namespace MLAgents.Sensor +{ + public struct 
CompressedObservation + { + /// + /// The compressed data. + /// + public byte[] Data; + + /// + /// The format of the compressed data + /// + public SensorCompressionType CompressionType; + + /// + /// The uncompressed dimensions of the data. + /// E.g. for RGB visual observations, this will be {Width, Height, 3} + /// + public int[] Shape; + } +} diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Sensor/CompressedObservation.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/Sensor/CompressedObservation.cs.meta new file mode 100644 index 0000000000..973caf6fc2 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Sensor/CompressedObservation.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: e9292f3e3b1864e0fbe1e9bb9a29d4ed +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Sensor/ISensor.cs b/UnitySDK/Assets/ML-Agents/Scripts/Sensor/ISensor.cs new file mode 100644 index 0000000000..49f8068955 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Sensor/ISensor.cs @@ -0,0 +1,54 @@ +using MLAgents.InferenceBrain; + +namespace MLAgents.Sensor +{ + public enum SensorCompressionType + { + None, + PNG, + } + + /// + /// Sensor interface for generating observations. + /// For custom implementations, it is recommended to SensorBase instead. + /// + public interface ISensor { + /// + /// Returns the size of the observations that will be generated. + /// For example, a sensor that observes the velocity of a rigid body (in 3D) would return new {3}. + /// A sensor that returns an RGB image would return new [] {Width, Height, 3} + /// + /// + int[] GetFloatObservationShape(); + + /// + /// Write the observation data directly to the TensorProxy. + /// This is considered an advanced interface; for a simpler approach, use SensorBase and override WriteFloats instead. 
+ /// + /// + /// + void WriteToTensor(TensorProxy tensorProxy, int agentIndex); + + /// + /// Return a compressed representation of the observation. For small observations, this should generally not be + /// implemented. However, compressing large observations (such as visual results) can significantly improve + /// model training time. + /// + /// + byte[] GetCompressedObservation(); + + /// + /// Return the compression type being used. If no compression is used, return SensorCompressionType.None + /// + /// + SensorCompressionType GetCompressionType(); + + /// + /// Get the name of the sensor. This is used to ensure deterministic sorting of the sensors on an Agent, + /// so the naming must be consistent across all sensors and agents. + /// + /// The name of the sensor + string GetName(); + } + +} diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Sensor/ISensor.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/Sensor/ISensor.cs.meta new file mode 100644 index 0000000000..d8ceedec70 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Sensor/ISensor.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 4bb5e09a94c6d4cd9a46c60b084e4952 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Sensor/RenderTextureSensor.cs b/UnitySDK/Assets/ML-Agents/Scripts/Sensor/RenderTextureSensor.cs new file mode 100644 index 0000000000..871854c809 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Sensor/RenderTextureSensor.cs @@ -0,0 +1,97 @@ +using System; +using System.Threading; +using MLAgents.InferenceBrain; +using UnityEngine; + +namespace MLAgents.Sensor +{ + class RenderTextureSensor : ISensor + { + private RenderTexture m_RenderTexture; + private int m_Width; + private int m_Height; + private bool m_Grayscale; + private string m_Name; + private int[] m_Shape; + + public RenderTextureSensor(RenderTexture 
renderTexture, int width, int height, bool grayscale, string name) + { + m_RenderTexture = renderTexture; + m_Width = width; + m_Height = height; + m_Grayscale = grayscale; + m_Name = name; + m_Shape = new[] { width, height, grayscale ? 1 : 3 }; + } + + public string GetName() + { + return m_Name; + } + + public int[] GetFloatObservationShape() + { + return m_Shape; + } + + public byte[] GetCompressedObservation() + { + using(TimerStack.Instance.Scoped("RenderTexSensor.GetCompressedObservation")) + { + var texture = ObservationToTexture(m_RenderTexture, m_Width, m_Height); + // TODO support more types here, e.g. JPG + var compressed = texture.EncodeToPNG(); + UnityEngine.Object.Destroy(texture); + return compressed; + } + } + + public void WriteToTensor(TensorProxy tensorProxy, int index) + { + using (TimerStack.Instance.Scoped("RenderTexSensor.GetCompressedObservation")) + { + var texture = ObservationToTexture(m_RenderTexture, m_Width, m_Height); + Utilities.TextureToTensorProxy(texture, tensorProxy, m_Grayscale, index); + UnityEngine.Object.Destroy(texture); + } + } + + public SensorCompressionType GetCompressionType() + { + return SensorCompressionType.PNG; + } + + /// + /// Converts a RenderTexture and correspinding resolution to a 2D texture. + /// + /// The 2D texture. + /// RenderTexture. + /// Width of resulting 2D texture. + /// Height of resulting 2D texture. + /// Texture2D to render to. 
+ public static Texture2D ObservationToTexture(RenderTexture obsTexture, int width, int height) + { + var texture2D = new Texture2D(width, height, TextureFormat.RGB24, false); + + if (width != texture2D.width || height != texture2D.height) + { + texture2D.Resize(width, height); + } + + if (width != obsTexture.width || height != obsTexture.height) + { + throw new UnityAgentsException(string.Format( + "RenderTexture {0} : width/height is {1}/{2} brain is expecting {3}/{4}.", + obsTexture.name, obsTexture.width, obsTexture.height, width, height)); + } + + var prevActiveRt = RenderTexture.active; + RenderTexture.active = obsTexture; + + texture2D.ReadPixels(new Rect(0, 0, texture2D.width, texture2D.height), 0, 0); + texture2D.Apply(); + RenderTexture.active = prevActiveRt; + return texture2D; + } + } +} diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Sensor/RenderTextureSensor.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/Sensor/RenderTextureSensor.cs.meta new file mode 100644 index 0000000000..28a1dff767 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Sensor/RenderTextureSensor.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 958f1f6bb9058405cae3c03266ad9899 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Sensor/RenderTextureSensorComponent.cs b/UnitySDK/Assets/ML-Agents/Scripts/Sensor/RenderTextureSensorComponent.cs new file mode 100644 index 0000000000..a19a532052 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Sensor/RenderTextureSensorComponent.cs @@ -0,0 +1,19 @@ +using System; +using UnityEngine; + +namespace MLAgents.Sensor +{ + public class RenderTextureSensorComponent : SensorComponent + { + public RenderTexture renderTexture; + public string sensorName = "RenderTextureSensor"; + public int width = 84; + public int height = 84; + public bool grayscale = false; + + 
public override ISensor CreateSensor() + { + return new RenderTextureSensor(renderTexture, width, height, grayscale, sensorName); + } + } +} diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Sensor/RenderTextureSensorComponent.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/Sensor/RenderTextureSensorComponent.cs.meta new file mode 100644 index 0000000000..542ca3e278 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Sensor/RenderTextureSensorComponent.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 132e1194facb64429b007ea1edf562d0 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Sensor/SensorBase.cs b/UnitySDK/Assets/ML-Agents/Scripts/Sensor/SensorBase.cs new file mode 100644 index 0000000000..15eb53bee6 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Sensor/SensorBase.cs @@ -0,0 +1,54 @@ +using MLAgents.InferenceBrain; +using UnityEngine; + +namespace MLAgents.Sensor +{ + public abstract class SensorBase : ISensor + { + /// + /// Write the observations to the output buffer. This size of the buffer will be product of the sizes returned + /// by GetFloatObservationShape(). + /// + /// + public abstract void WriteObservation(float[] output); + + public abstract int[] GetFloatObservationShape(); + + public abstract string GetName(); + + /// + /// Default implementation of WriteToTensor interface. This creates a temporary array, calls WriteObservation, + /// and then writes the results to the TensorProxy. 
+ /// + /// + /// + public virtual void WriteToTensor(TensorProxy tensorProxy, int agentIndex) + { + // TODO reuse buffer for similar agents, don't call GetFloatObservationShape() + int[] shape = GetFloatObservationShape(); + int numFloats = 1; + foreach (var dim in shape) + { + numFloats *= dim; + } + + float[] buffer = new float[numFloats]; + WriteObservation(buffer); + + for (var i = 0; i < numFloats; i++) + { + tensorProxy.data[agentIndex, i] = buffer[i]; + } + } + + public virtual byte[] GetCompressedObservation() + { + return null; + } + + public virtual SensorCompressionType GetCompressionType() + { + return SensorCompressionType.None; + } + } +} diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Sensor/SensorBase.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/Sensor/SensorBase.cs.meta new file mode 100644 index 0000000000..c724240207 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Sensor/SensorBase.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 553b05a1b59a94260b3e545f13190389 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Sensor/SensorComponent.cs b/UnitySDK/Assets/ML-Agents/Scripts/Sensor/SensorComponent.cs new file mode 100644 index 0000000000..177846a7b5 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Sensor/SensorComponent.cs @@ -0,0 +1,18 @@ +using System; +using UnityEngine; + +namespace MLAgents.Sensor +{ + /// + /// Editor components for creating Sensors. Generally an ISensor implementation should have a corresponding + /// SensorComponent to create it. + /// + public abstract class SensorComponent : MonoBehaviour + { + /// + /// Create the ISensor. This is called by the Agent when it is initialized. 
+ /// + /// + public abstract ISensor CreateSensor(); + } +} diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Sensor/SensorComponent.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/Sensor/SensorComponent.cs.meta new file mode 100644 index 0000000000..5576281e12 --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Sensor/SensorComponent.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 4f1dad589959a4b598d09e54f61fbe02 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Scripts/SocketCommunicator.cs b/UnitySDK/Assets/ML-Agents/Scripts/SocketCommunicator.cs deleted file mode 100644 index 9dceb45370..0000000000 --- a/UnitySDK/Assets/ML-Agents/Scripts/SocketCommunicator.cs +++ /dev/null @@ -1,181 +0,0 @@ -using Google.Protobuf; -using System.Net.Sockets; -using UnityEngine; -using MLAgents.CommunicatorObjects; -using System.Threading.Tasks; -#if UNITY_EDITOR -using UnityEditor; -#endif - -namespace MLAgents -{ - public class SocketCommunicator : ICommunicator - { - private const float k_TimeOut = 10f; - private const int k_MessageLength = 12000; - byte[] m_MessageHolder = new byte[k_MessageLength]; - int m_ComPort; - Socket m_Sender; - byte[] m_LengthHolder = new byte[4]; - CommunicatorParameters m_CommunicatorParameters; - - - public SocketCommunicator(CommunicatorParameters communicatorParameters) - { - m_CommunicatorParameters = communicatorParameters; - } - - /// - /// Initialize the communicator by sending the first UnityOutput and receiving the - /// first UnityInput. The second UnityInput is stored in the unityInput argument. - /// - /// The first Unity Input. - /// The first Unity Output. - /// The second Unity input. 
- public UnityInput Initialize(UnityOutput unityOutput, - out UnityInput unityInput) - { - m_Sender = new Socket( - AddressFamily.InterNetwork, - SocketType.Stream, - ProtocolType.Tcp); - m_Sender.Connect("localhost", m_CommunicatorParameters.port); - - var initializationInput = - UnityMessage.Parser.ParseFrom(Receive()); - - Send(WrapMessage(unityOutput, 200).ToByteArray()); - - unityInput = UnityMessage.Parser.ParseFrom(Receive()).UnityInput; -#if UNITY_EDITOR -#if UNITY_2017_2_OR_NEWER - EditorApplication.playModeStateChanged += HandleOnPlayModeChanged; -#else - EditorApplication.playmodeStateChanged += HandleOnPlayModeChanged; -#endif -#endif - return initializationInput.UnityInput; - } - - /// - /// Uses the socke to receive a byte[] from External. Reassemble a message that was split - /// by External if it was too long. - /// - /// The byte[] sent by External. - byte[] Receive() - { - m_Sender.Receive(m_LengthHolder); - var totalLength = System.BitConverter.ToInt32(m_LengthHolder, 0); - var location = 0; - var result = new byte[totalLength]; - while (location != totalLength) - { - var fragment = m_Sender.Receive(m_MessageHolder); - System.Buffer.BlockCopy( - m_MessageHolder, 0, result, location, fragment); - location += fragment; - } - return result; - } - - /// - /// Send the specified input via socket to External. Split the message into smaller - /// parts if it is too long. - /// - /// The byte[] to be sent. - void Send(byte[] input) - { - var newArray = new byte[input.Length + 4]; - input.CopyTo(newArray, 4); - System.BitConverter.GetBytes(input.Length).CopyTo(newArray, 0); - m_Sender.Send(newArray); - } - - /// - /// Close the communicator gracefully on both sides of the communication. - /// - public void Close() - { - Send(WrapMessage(null, 400).ToByteArray()); - } - - /// - /// Send a UnityOutput and receives a UnityInput. - /// - /// The next UnityInput. - /// The UnityOutput to be sent. 
- public UnityInput Exchange(UnityOutput unityOutput) - { - Send(WrapMessage(unityOutput, 200).ToByteArray()); - byte[] received = null; - var task = Task.Run(() => received = Receive()); - if (!task.Wait(System.TimeSpan.FromSeconds(k_TimeOut))) - { - throw new UnityAgentsException( - "The communicator took too long to respond."); - } - - var message = UnityMessage.Parser.ParseFrom(received); - - if (message.Header.Status != 200) - { - return null; - } - return message.UnityInput; - } - - /// - /// Wraps the UnityOuptut into a message with the appropriate status. - /// - /// The UnityMessage corresponding. - /// The UnityOutput to be wrapped. - /// The status of the message. - private static UnityMessage WrapMessage(UnityOutput content, int status) - { - return new UnityMessage - { - Header = new Header { Status = status }, - UnityOutput = content - }; - } - - /// - /// When the Unity application quits, the communicator must be closed - /// - private void OnApplicationQuit() - { - Close(); - } - -#if UNITY_EDITOR -#if UNITY_2017_2_OR_NEWER - /// - /// When the editor exits, the communicator must be closed - /// - /// State. - private void HandleOnPlayModeChanged(PlayModeStateChange state) - { - // This method is run whenever the playmode state is changed. - if (state == PlayModeStateChange.ExitingPlayMode) - { - Close(); - } - } - -#else - /// - /// When the editor exits, the communicator must be closed - /// - private void HandleOnPlayModeChanged() - { - // This method is run whenever the playmode state is changed. 
- if (!EditorApplication.isPlayingOrWillChangePlaymode) - { - Close(); - } - } - -#endif -#endif - } -} diff --git a/UnitySDK/Assets/ML-Agents/Scripts/SocketCommunicator.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/SocketCommunicator.cs.meta deleted file mode 100644 index 23ac272f1b..0000000000 --- a/UnitySDK/Assets/ML-Agents/Scripts/SocketCommunicator.cs.meta +++ /dev/null @@ -1,13 +0,0 @@ -fileFormatVersion: 2 -guid: f0901c57c84a54f25aa5955165072493 -timeCreated: 1523046536 -licenseType: Free -MonoImporter: - externalObjects: {} - serializedVersion: 2 - defaultReferences: [] - executionOrder: 0 - icon: {instanceID: 0} - userData: - assetBundleName: - assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Timer.cs b/UnitySDK/Assets/ML-Agents/Scripts/Timer.cs new file mode 100644 index 0000000000..edf7f3d15b --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Timer.cs @@ -0,0 +1,346 @@ +// Compile with: csc CRefTest.cs -doc:Results.xml +using UnityEngine; +using System.Collections.Generic; +using System.IO; +using UnityEngine.Profiling; +using System.Runtime.Serialization; +using System.Runtime.Serialization.Json; +#if UNITY_EDITOR +using UnityEditor; + +#endif + + +namespace MLAgents +{ + [DataContract] + public class TimerNode + { + static string s_Separator = "."; + static double s_TicksToSeconds = 1e-7; // 100 ns per tick + + /// + /// Full name of the node. This is the node's parents full name concatenated with this node's name + /// + string m_FullName; + + /// + /// Child nodes, indexed by name. + /// + [DataMember(Name = "children", Order = 999)] + Dictionary m_Children; + + /// + /// Custom sampler used to add timings to the profiler. + /// + private CustomSampler m_Sampler; + + /// + /// Number of total ticks elapsed for this node. + /// + long m_TotalTicks; + + /// + /// If the node is currently running, the time (in ticks) when the node was started. + /// If the node is not running, is set to 0. 
+ /// + long m_TickStart; + + /// + /// Number of times the corresponding code block has been called. + /// + [DataMember(Name = "count")] + int m_NumCalls; + + /// + /// The total recorded ticks for the timer node, plus the currently elapsed ticks + /// if the timer is still running (i.e. if m_TickStart is non-zero). + /// + public long CurrentTicks + { + get + { + var currentTicks = m_TotalTicks; + if (m_TickStart != 0) + { + currentTicks += (System.DateTime.Now.Ticks - m_TickStart); + } + + return currentTicks; + } + } + + /// + /// Total elapsed seconds. + /// + [DataMember(Name = "total")] + public double TotalSeconds + { + get { return CurrentTicks * s_TicksToSeconds; } + set {} // Serialization needs this, but unused. + } + + /// + /// Total seconds spent in this block, excluding it's children. + /// + [DataMember(Name = "self")] + public double SelfSeconds + { + get + { + long totalChildTicks = 0; + if (m_Children != null) + { + foreach (var child in m_Children.Values) + { + totalChildTicks += child.m_TotalTicks; + } + } + + var selfTicks = Mathf.Max(0, CurrentTicks - totalChildTicks); + return selfTicks * s_TicksToSeconds; + } + set {} // Serialization needs this, but unused. + } + + public IReadOnlyDictionary Children + { + get { return m_Children; } + } + + public int NumCalls + { + get { return m_NumCalls; } + } + + public TimerNode(string name, bool isRoot = false) + { + m_FullName = name; + if (isRoot) + { + // The root node is considered always running. This means that when we output stats, it'll + // have a sensible value for total time (the running time since reset). + // The root node doesn't have a sampler since that could interfere with the profiler. + m_NumCalls = 1; + m_TickStart = System.DateTime.Now.Ticks; + } + else + { + m_Sampler = CustomSampler.Create(m_FullName); + } + } + + /// + /// Start timing a block of code. 
+ /// + public void Begin() + { + m_Sampler?.Begin(); + m_TickStart = System.DateTime.Now.Ticks; + } + + /// + /// Stop timing a block of code, and increment internal counts. + /// + public void End() + { + var elapsed = System.DateTime.Now.Ticks - m_TickStart; + m_TotalTicks += elapsed; + m_TickStart = 0; + m_NumCalls++; + m_Sampler?.End(); + } + + /// + /// Return a child node for the given name. + /// The children dictionary will be created if it does not already exist, and + /// a new Node will be created if it's not already in the dictionary. + /// Note that these allocations only happen once for a given timed block. + /// + /// + /// + public TimerNode GetChild(string name) + { + // Lazily create the children dictionary. + if (m_Children == null) + { + m_Children = new Dictionary(); + } + + if (!m_Children.ContainsKey(name)) + { + var childFullName = m_FullName + s_Separator + name; + var newChild = new TimerNode(childFullName); + m_Children[name] = newChild; + return newChild; + } + + return m_Children[name]; + } + + /// + /// Recursively form a string representing the current timer information. + /// + /// + /// + /// + public string DebugGetTimerString(string parentName = "", int level = 0) + { + var indent = new string(' ', 2 * level); // TODO generalize + var shortName = (level == 0) ? m_FullName : m_FullName.Replace(parentName + s_Separator, ""); + string timerString; + if (level == 0) + { + timerString = $"{shortName}(root)\n"; + } + else + { + timerString = $"{indent}{shortName}\t\traw={TotalSeconds} rawCount={m_NumCalls}\n"; + } + + // TODO use StringBuilder? might be overkill since this is only debugging code? + if (m_Children != null) + { + foreach (var c in m_Children.Values) + { + timerString += c.DebugGetTimerString(m_FullName, level + 1); + } + } + return timerString; + } + } + + /// + /// A "stack" of timers that allows for lightweight hierarchical profiling of long-running processes. 
+ /// + /// Example usage: + /// + /// using(TimerStack.Instance.Scoped("foo")) + /// { + /// doSomeWork(); + /// for (int i=0; i<5; i++) + /// { + /// using(myTimer.Scoped("bar")) + /// { + /// doSomeMoreWork(); + /// } + /// } + /// } + /// + /// + /// + /// + /// This implements the Singleton pattern (solution 4) as described in + /// https://csharpindepth.com/articles/singleton + /// + public class TimerStack : System.IDisposable + { + private static readonly TimerStack k_Instance = new TimerStack(); + + Stack m_Stack; + TimerNode m_RootNode; + + // Explicit static constructor to tell C# compiler + // not to mark type as beforefieldinit + static TimerStack() + { + } + + private TimerStack() + { + Reset(); + } + + public void Reset(string name = "root") + { + m_Stack = new Stack(); + m_RootNode = new TimerNode(name, true); + m_Stack.Push(m_RootNode); + } + + public static TimerStack Instance + { + get { return k_Instance; } + } + + public TimerNode RootNode + { + get { return m_RootNode; } + } + + private void Push(string name) + { + var current = m_Stack.Peek(); + var next = current.GetChild(name); + m_Stack.Push(next); + next.Begin(); + } + + private void Pop() + { + var node = m_Stack.Pop(); + node.End(); + } + + /// + /// Start a scoped timer. This should be used with the "using" statement. + /// + /// + /// + public TimerStack Scoped(string name) + { + Push(name); + return this; + } + + /// + /// Closes the current scoped timer. This should never be called directly, only + /// at the end of a "using" statement. + /// Note that the instance is not actually disposed of; this is just to allow it to be used + /// conveniently with "using". + /// + public void Dispose() + { + Pop(); + } + + /// + /// Get a string representation of the timers. + /// Potentially slow so call sparingly. + /// + /// + public string DebugGetTimerString() + { + return m_RootNode.DebugGetTimerString(); + } + + /// + /// Save the timers in JSON format to the provided filename. 
+ /// If the filename is null, a default one will be used. + /// + /// + public void SaveJsonTimers(string filename = null) + { + if (filename == null) + { + var fullpath = Path.GetFullPath("."); + filename = $"{fullpath}/csharp_timers.json"; + } + var fs = new FileStream(filename, FileMode.Create, FileAccess.Write); + SaveJsonTimers(fs); + fs.Close(); + } + + /// + /// Write the timers in JSON format to the provided stream. + /// + /// + public void SaveJsonTimers(Stream stream) + { + var jsonSettings = new DataContractJsonSerializerSettings(); + jsonSettings.UseSimpleDictionaryFormat = true; + var ser = new DataContractJsonSerializer(typeof(TimerNode), jsonSettings); + ser.WriteObject(stream, m_RootNode); + } + } +} diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Timer.cs.meta b/UnitySDK/Assets/ML-Agents/Scripts/Timer.cs.meta new file mode 100644 index 0000000000..e28315908d --- /dev/null +++ b/UnitySDK/Assets/ML-Agents/Scripts/Timer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: d268f7dfcc74c47939e1fc520adb8d81 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/UnitySDK/Assets/ML-Agents/Scripts/Utilities.cs b/UnitySDK/Assets/ML-Agents/Scripts/Utilities.cs index 1c0170ef3d..9e7cf3d589 100644 --- a/UnitySDK/Assets/ML-Agents/Scripts/Utilities.cs +++ b/UnitySDK/Assets/ML-Agents/Scripts/Utilities.cs @@ -28,31 +28,66 @@ public static void TextureToTensorProxy( var numTextures = textures.Count; var width = textures[0].width; var height = textures[0].height; - var data = tensorProxy.data; for (var t = 0; t < numTextures; t++) { - var texturePixels = textures[t].GetPixels32(); - for (var h = height - 1; h >= 0; h--) + var texture = textures[t]; + Debug.Assert(width == texture.width, "All Textures must have the same dimension"); + Debug.Assert(height == texture.height, "All Textures must have the same dimension"); + 
TextureToTensorProxy(texture, tensorProxy, grayScale, t); + } + } + + /// + /// Puts a Texture2D into a TensorProxy. + /// + /// + /// The texture to be put into the tensor. + /// + /// + /// TensorProxy to fill with Texture data. + /// + /// + /// If set to true the textures will be converted to grayscale before + /// being stored in the tensor. + /// + /// + /// Index of the texture being written. + /// + public static void TextureToTensorProxy( + Texture2D texture, + TensorProxy tensorProxy, + bool grayScale, + int textureOffset = 0) + { + var width = texture.width; + var height = texture.height; + var data = tensorProxy.data; + + var t = textureOffset; + var texturePixels = texture.GetPixels32(); + // During training, we convert from Texture to PNG before sending to the trainer, which has the + // effect of flipping the image. We need another flip here at inference time to match this. + for (var h = height - 1; h >= 0; h--) + { + for (var w = 0; w < width; w++) { - for (var w = 0; w < width; w++) + var currentPixel = texturePixels[(height - h - 1) * width + w]; + if (grayScale) { - var currentPixel = texturePixels[(height - h - 1) * width + w]; - if (grayScale) - { - data[t, h, w, 0] = - (currentPixel.r + currentPixel.g + currentPixel.b) / 3f / 255.0f; - } - else - { - // For Color32, the r, g and b values are between 0 and 255. - data[t, h, w, 0] = currentPixel.r / 255.0f; - data[t, h, w, 1] = currentPixel.g / 255.0f; - data[t, h, w, 2] = currentPixel.b / 255.0f; - } + data[t, h, w, 0] = + (currentPixel.r + currentPixel.g + currentPixel.b) / 3f / 255.0f; + } + else + { + // For Color32, the r, g and b values are between 0 and 255. 
+ data[t, h, w, 0] = currentPixel.r / 255.0f; + data[t, h, w, 1] = currentPixel.g / 255.0f; + data[t, h, w, 2] = currentPixel.b / 255.0f; } } } + } /// diff --git a/UnitySDK/README.md b/UnitySDK/README.md index 731d5b9f1a..903e70fec3 100644 --- a/UnitySDK/README.md +++ b/UnitySDK/README.md @@ -1,5 +1,5 @@ # Unity ML-Agents SDK -Contains the ML-Agents Unity Project, including -both the core plugin (in `Scripts`), as well as a set +Contains the ML-Agents Unity Project, including +both the core plugin (in `Scripts`), as well as a set of example environments (in `Examples`). \ No newline at end of file diff --git a/UnitySDK/UnitySDK.sln.DotSettings b/UnitySDK/UnitySDK.sln.DotSettings index e1d484280f..2bc3f33d99 100644 --- a/UnitySDK/UnitySDK.sln.DotSettings +++ b/UnitySDK/UnitySDK.sln.DotSettings @@ -3,6 +3,7 @@ CPU GPU NN + RL True True diff --git a/config/curricula/wall-jump/BigWallJumpLearning.json b/config/curricula/wall-jump/BigWallJump.json similarity index 100% rename from config/curricula/wall-jump/BigWallJumpLearning.json rename to config/curricula/wall-jump/BigWallJump.json diff --git a/config/curricula/wall-jump/SmallWallJumpLearning.json b/config/curricula/wall-jump/SmallWallJump.json similarity index 100% rename from config/curricula/wall-jump/SmallWallJumpLearning.json rename to config/curricula/wall-jump/SmallWallJump.json diff --git a/config/gail_config.yaml b/config/gail_config.yaml index 82fde73c49..da91c9e694 100644 --- a/config/gail_config.yaml +++ b/config/gail_config.yaml @@ -21,7 +21,7 @@ default: strength: 1.0 gamma: 0.99 -PyramidsLearning: +Pyramids: summary_freq: 2000 time_horizon: 128 batch_size: 128 @@ -49,7 +49,7 @@ PyramidsLearning: encoding_size: 128 demo_path: demos/ExpertPyramid.demo -CrawlerStaticLearning: +CrawlerStatic: normalize: true num_epoch: 3 time_horizon: 1000 @@ -66,7 +66,7 @@ CrawlerStaticLearning: encoding_size: 128 demo_path: demos/ExpertCrawlerSta.demo -PushBlockLearning: +PushBlock: max_steps: 5.0e4 batch_size: 128 
buffer_size: 2048 @@ -82,7 +82,7 @@ PushBlockLearning: encoding_size: 128 demo_path: demos/ExpertPush.demo -HallwayLearning: +Hallway: use_recurrent: true sequence_length: 64 num_layers: 2 diff --git a/config/offline_bc_config.yaml b/config/offline_bc_config.yaml index 1f8d7f0d35..5c0fd4b6c0 100644 --- a/config/offline_bc_config.yaml +++ b/config/offline_bc_config.yaml @@ -12,7 +12,7 @@ default: memory_size: 256 demo_path: ./UnitySDK/Assets/Demonstrations/.demo -HallwayLearning: +Hallway: trainer: offline_bc max_steps: 5.0e5 num_epoch: 5 diff --git a/config/online_bc_config.yaml b/config/online_bc_config.yaml deleted file mode 100644 index 66d0e3f9be..0000000000 --- a/config/online_bc_config.yaml +++ /dev/null @@ -1,110 +0,0 @@ -default: - trainer: online_bc - brain_to_imitate: - batch_size: 64 - time_horizon: 64 - summary_freq: 1000 - max_steps: 5.0e4 - batches_per_epoch: 10 - use_recurrent: false - hidden_units: 128 - learning_rate: 3.0e-4 - num_layers: 2 - sequence_length: 32 - memory_size: 256 - -FoodCollectorLearning: - trainer: online_bc - max_steps: 10000 - summary_freq: 1000 - brain_to_imitate: FoodCollectorPlayer - batch_size: 16 - batches_per_epoch: 5 - num_layers: 4 - hidden_units: 64 - use_recurrent: false - sequence_length: 16 - -BouncerLearning: - trainer: online_bc - max_steps: 10000 - summary_freq: 10 - brain_to_imitate: BouncerPlayer - batch_size: 16 - batches_per_epoch: 1 - num_layers: 1 - hidden_units: 64 - use_recurrent: false - sequence_length: 16 - -HallwayLearning: - trainer: online_bc - max_steps: 10000 - summary_freq: 1000 - brain_to_imitate: HallwayPlayer - batch_size: 16 - batches_per_epoch: 5 - num_layers: 4 - hidden_units: 64 - use_recurrent: false - sequence_length: 16 - -PushBlockLearning: - trainer: online_bc - max_steps: 10000 - summary_freq: 1000 - brain_to_imitate: PushBlockPlayer - batch_size: 16 - batches_per_epoch: 5 - num_layers: 4 - hidden_units: 64 - use_recurrent: false - sequence_length: 16 - -PyramidsLearning: - trainer: 
online_bc - max_steps: 10000 - summary_freq: 1000 - brain_to_imitate: PyramidsPlayer - batch_size: 16 - batches_per_epoch: 5 - num_layers: 4 - hidden_units: 64 - use_recurrent: false - sequence_length: 16 - -TennisLearning: - trainer: online_bc - max_steps: 10000 - summary_freq: 1000 - brain_to_imitate: TennisPlayer - batch_size: 16 - batches_per_epoch: 5 - num_layers: 4 - hidden_units: 64 - use_recurrent: false - sequence_length: 16 - -StudentBrain: - trainer: online_bc - max_steps: 10000 - summary_freq: 1000 - brain_to_imitate: TeacherBrain - batch_size: 16 - batches_per_epoch: 5 - num_layers: 4 - hidden_units: 64 - use_recurrent: false - sequence_length: 16 - -StudentRecurrentBrain: - trainer: online_bc - max_steps: 10000 - summary_freq: 1000 - brain_to_imitate: TeacherBrain - batch_size: 16 - batches_per_epoch: 5 - num_layers: 4 - hidden_units: 64 - use_recurrent: true - sequence_length: 32 \ No newline at end of file diff --git a/config/sac_trainer_config.yaml b/config/sac_trainer_config.yaml index 706d8ded83..718778e4f6 100644 --- a/config/sac_trainer_config.yaml +++ b/config/sac_trainer_config.yaml @@ -24,7 +24,7 @@ default: strength: 1.0 gamma: 0.99 -FoodCollectorLearning: +FoodCollector: normalize: false batch_size: 256 buffer_size: 500000 @@ -32,14 +32,14 @@ FoodCollectorLearning: init_entcoef: 0.05 train_interval: 1 -BouncerLearning: +Bouncer: normalize: true max_steps: 5.0e5 num_layers: 2 hidden_units: 64 summary_freq: 1000 -PushBlockLearning: +PushBlock: max_steps: 5.0e4 init_entcoef: 0.05 hidden_units: 256 @@ -47,7 +47,7 @@ PushBlockLearning: time_horizon: 64 num_layers: 2 -SmallWallJumpLearning: +SmallWallJump: max_steps: 1.0e6 hidden_units: 256 summary_freq: 2000 @@ -56,7 +56,7 @@ SmallWallJumpLearning: num_layers: 2 normalize: false -BigWallJumpLearning: +BigWallJump: max_steps: 1.0e6 hidden_units: 256 summary_freq: 2000 @@ -65,7 +65,7 @@ BigWallJumpLearning: init_entcoef: 0.1 normalize: false -StrikerLearning: +Striker: max_steps: 5.0e5 
learning_rate: 1e-3 hidden_units: 256 @@ -75,7 +75,7 @@ StrikerLearning: num_layers: 2 normalize: false -GoalieLearning: +Goalie: max_steps: 5.0e5 learning_rate: 1e-3 hidden_units: 256 @@ -85,7 +85,7 @@ GoalieLearning: num_layers: 2 normalize: false -PyramidsLearning: +Pyramids: summary_freq: 2000 time_horizon: 128 batch_size: 128 @@ -109,7 +109,7 @@ PyramidsLearning: use_actions: true demo_path: demos/ExpertPyramid.demo -VisualPyramidsLearning: +VisualPyramids: time_horizon: 128 batch_size: 64 hidden_units: 256 @@ -130,7 +130,7 @@ VisualPyramidsLearning: use_actions: true demo_path: demos/ExpertPyramid.demo -3DBallLearning: +3DBall: normalize: true batch_size: 64 buffer_size: 12000 @@ -139,18 +139,18 @@ VisualPyramidsLearning: hidden_units: 64 init_entcoef: 0.5 -3DBallHardLearning: +3DBallHard: normalize: true batch_size: 256 summary_freq: 1000 time_horizon: 1000 -TennisLearning: +Tennis: buffer_size: 500000 normalize: true max_steps: 2e5 -CrawlerStaticLearning: +CrawlerStatic: normalize: true time_horizon: 1000 batch_size: 256 @@ -167,7 +167,7 @@ CrawlerStaticLearning: strength: 1.0 gamma: 0.995 -CrawlerDynamicLearning: +CrawlerDynamic: normalize: true time_horizon: 1000 batch_size: 256 @@ -182,7 +182,7 @@ CrawlerDynamicLearning: strength: 1.0 gamma: 0.995 -WalkerLearning: +Walker: normalize: true time_horizon: 1000 batch_size: 256 @@ -197,7 +197,7 @@ WalkerLearning: strength: 1.0 gamma: 0.995 -ReacherLearning: +Reacher: normalize: true time_horizon: 1000 batch_size: 128 @@ -205,7 +205,7 @@ ReacherLearning: max_steps: 2e5 summary_freq: 3000 -HallwayLearning: +Hallway: use_recurrent: true sequence_length: 32 num_layers: 2 @@ -217,7 +217,7 @@ HallwayLearning: time_horizon: 64 use_recurrent: true -VisualHallwayLearning: +VisualHallway: use_recurrent: true sequence_length: 32 num_layers: 1 @@ -230,7 +230,7 @@ VisualHallwayLearning: time_horizon: 64 use_recurrent: true -VisualPushBlockLearning: +VisualPushBlock: use_recurrent: true sequence_length: 32 num_layers: 1 @@ 
-243,12 +243,13 @@ VisualPushBlockLearning: summary_freq: 1000 time_horizon: 64 -GridWorldLearning: +GridWorld: batch_size: 128 normalize: false num_layers: 1 hidden_units: 128 - init_entcoef: 0.01 + init_entcoef: 0.5 + buffer_init_steps: 1000 buffer_size: 50000 max_steps: 5.0e5 summary_freq: 2000 @@ -258,7 +259,7 @@ GridWorldLearning: strength: 1.0 gamma: 0.9 -BasicLearning: +Basic: batch_size: 64 normalize: false num_layers: 2 diff --git a/config/trainer_config.yaml b/config/trainer_config.yaml index 60df2f12c0..9480e3bb49 100644 --- a/config/trainer_config.yaml +++ b/config/trainer_config.yaml @@ -23,20 +23,20 @@ default: strength: 1.0 gamma: 0.99 -FoodCollectorLearning: +FoodCollector: normalize: false beta: 5.0e-3 batch_size: 1024 buffer_size: 10240 max_steps: 1.0e5 -BouncerLearning: +Bouncer: normalize: true max_steps: 1.0e6 num_layers: 2 hidden_units: 64 -PushBlockLearning: +PushBlock: max_steps: 5.0e4 batch_size: 128 buffer_size: 2048 @@ -46,7 +46,7 @@ PushBlockLearning: time_horizon: 64 num_layers: 2 -SmallWallJumpLearning: +SmallWallJump: max_steps: 1.0e6 batch_size: 128 buffer_size: 2048 @@ -57,7 +57,7 @@ SmallWallJumpLearning: num_layers: 2 normalize: false -BigWallJumpLearning: +BigWallJump: max_steps: 1.0e6 batch_size: 128 buffer_size: 2048 @@ -68,7 +68,7 @@ BigWallJumpLearning: num_layers: 2 normalize: false -StrikerLearning: +Striker: max_steps: 5.0e5 learning_rate: 1e-3 batch_size: 128 @@ -81,7 +81,7 @@ StrikerLearning: num_layers: 2 normalize: false -GoalieLearning: +Goalie: max_steps: 5.0e5 learning_rate: 1e-3 batch_size: 320 @@ -94,7 +94,7 @@ GoalieLearning: num_layers: 2 normalize: false -PyramidsLearning: +Pyramids: summary_freq: 2000 time_horizon: 128 batch_size: 128 @@ -113,7 +113,7 @@ PyramidsLearning: gamma: 0.99 encoding_size: 256 -VisualPyramidsLearning: +VisualPyramids: time_horizon: 128 batch_size: 64 buffer_size: 2024 @@ -131,7 +131,7 @@ VisualPyramidsLearning: gamma: 0.99 encoding_size: 256 -3DBallLearning: +3DBall: normalize: true 
batch_size: 64 buffer_size: 12000 @@ -140,7 +140,7 @@ VisualPyramidsLearning: lambd: 0.99 beta: 0.001 -3DBallHardLearning: +3DBallHard: normalize: true batch_size: 1200 buffer_size: 12000 @@ -153,11 +153,11 @@ VisualPyramidsLearning: strength: 1.0 gamma: 0.995 -TennisLearning: +Tennis: normalize: true max_steps: 2e5 -CrawlerStaticLearning: +CrawlerStatic: normalize: true num_epoch: 3 time_horizon: 1000 @@ -172,7 +172,7 @@ CrawlerStaticLearning: strength: 1.0 gamma: 0.995 -CrawlerDynamicLearning: +CrawlerDynamic: normalize: true num_epoch: 3 time_horizon: 1000 @@ -187,7 +187,7 @@ CrawlerDynamicLearning: strength: 1.0 gamma: 0.995 -WalkerLearning: +Walker: normalize: true num_epoch: 3 time_horizon: 1000 @@ -202,7 +202,7 @@ WalkerLearning: strength: 1.0 gamma: 0.995 -ReacherLearning: +Reacher: normalize: true num_epoch: 3 time_horizon: 1000 @@ -215,7 +215,7 @@ ReacherLearning: strength: 1.0 gamma: 0.995 -HallwayLearning: +Hallway: use_recurrent: true sequence_length: 64 num_layers: 2 @@ -229,7 +229,7 @@ HallwayLearning: summary_freq: 1000 time_horizon: 64 -VisualHallwayLearning: +VisualHallway: use_recurrent: true sequence_length: 64 num_layers: 1 @@ -243,7 +243,7 @@ VisualHallwayLearning: summary_freq: 1000 time_horizon: 64 -VisualPushBlockLearning: +VisualPushBlock: use_recurrent: true sequence_length: 32 num_layers: 1 @@ -257,7 +257,7 @@ VisualPushBlockLearning: summary_freq: 1000 time_horizon: 64 -GridWorldLearning: +GridWorld: batch_size: 32 normalize: false num_layers: 1 @@ -272,7 +272,7 @@ GridWorldLearning: strength: 1.0 gamma: 0.9 -BasicLearning: +Basic: batch_size: 32 normalize: false num_layers: 1 diff --git a/docs/Background-TensorFlow.md b/docs/Background-TensorFlow.md index 5d305aa4e3..3c071c067e 100644 --- a/docs/Background-TensorFlow.md +++ b/docs/Background-TensorFlow.md @@ -17,7 +17,7 @@ performing computations using data flow graphs, the underlying representation of deep learning models. 
It facilitates training and inference on CPUs and GPUs in a desktop, server, or mobile device. Within the ML-Agents toolkit, when you train the behavior of an agent, the output is a TensorFlow model (.nn) file -that you can then embed within a Learning Brain. Unless you implement a new +that you can then associate with an Agent. Unless you implement a new algorithm, the use of TensorFlow is mostly abstracted away and behind the scenes. diff --git a/docs/Basic-Guide.md b/docs/Basic-Guide.md index 551873938d..ac9b03101a 100644 --- a/docs/Basic-Guide.md +++ b/docs/Basic-Guide.md @@ -11,7 +11,7 @@ the basic concepts of Unity. ## Setting up the ML-Agents Toolkit within Unity In order to use the ML-Agents toolkit within Unity, you first need to change a few -Unity settings. +Unity settings. 1. Launch Unity 2. On the Projects dialog, choose the **Open** option at the top of the window. @@ -27,34 +27,28 @@ Unity settings. ## Running a Pre-trained Model -We include pre-trained models for our agents (`.nn` files) and we use the -[Unity Inference Engine](Unity-Inference-Engine.md) to run these models -inside Unity. In this section, we will use the pre-trained model for the +We include pre-trained models for our agents (`.nn` files) and we use the +[Unity Inference Engine](Unity-Inference-Engine.md) to run these models +inside Unity. In this section, we will use the pre-trained model for the 3D Ball example. 1. In the **Project** window, go to the `Assets/ML-Agents/Examples/3DBall/Scenes` folder and open the `3DBall` scene file. -2. In the **Project** window, go to the `Assets/ML-Agents/Examples/3DBall/Prefabs` folder. - Expand `Game` and click on the `Platform` prefab. You should see the `Platform` prefab in the **Inspector** window. - - **Note**: The platforms in the `3DBall` scene were created using the `Platform` prefab. Instead of updating all 12 platforms individually, you can update the `Platform` prefab instead. - +2. 
In the **Project** window, go to the `Assets/ML-Agents/Examples/3DBall/Prefabs` folder. + Expand `3DBall` and click on the `Agent` prefab. You should see the `Agent` prefab in the **Inspector** window. + + **Note**: The platforms in the `3DBall` scene were created using the `3DBall` prefab. Instead of updating all 12 platforms individually, you can update the `3DBall` prefab instead. + ![Platform Prefab](images/platform_prefab.png) - -3. In the **Project** window, drag the **3DBallLearning** Brain located in - `Assets/ML-Agents/Examples/3DBall/Brains` into the `Brain` property under `Ball 3D Agent (Script)` component in the **Inspector** window. - + +3. In the **Project** window, drag the **3DBallLearning** Model located in + `Assets/ML-Agents/Examples/3DBall/TFModels` into the `Model` property under `Ball 3D Agent (Script)` component in the **Inspector** window. + ![3dball learning brain](images/3dball_learning_brain.png) - -4. You should notice that each `Platform` under each `Game` in the **Hierarchy** windows now contains **3DBallLearning** as `Brain`. __Note__ : You can modify multiple game objects in a scene by selecting them all at - once using the search bar in the Scene Hierarchy. -5. In the **Project** window, click on the **3DBallLearning** Brain located in - `Assets/ML-Agents/Examples/3DBall/Brains`. You should see the properties in the **Inspector** window. -6. In the **Project** window, open the `Assets/ML-Agents/Examples/3DBall/TFModels` - folder. -7. Drag the `3DBallLearning` model file from the `Assets/ML-Agents/Examples/3DBall/TFModels` - folder to the **Model** field of the **3DBallLearning** Brain in the **Inspector** window. __Note__ : All of the brains should now have `3DBallLearning` as the TensorFlow model in the `Model` property -8. Select the **InferenceDevice** to use for this model (CPU or GPU). + +4. You should notice that each `Agent` under each `3DBall` in the **Hierarchy** windows now contains **3DBallLearning** as `Model`. 
__Note__ : You can modify multiple game objects in a scene by selecting them all at + once using the search bar in the Scene Hierarchy. +8. Select the **InferenceDevice** to use for this model (CPU or GPU) on the Agent. _Note: CPU is faster for the majority of ML-Agents toolkit generated models_ 9. Click the **Play** button and you will see the platforms balance the balls using the pre-trained model. @@ -70,33 +64,22 @@ Within `Basics`, be sure to set `env_name` to the name of the Unity executable if you want to [use an executable](Learning-Environment-Executable.md) or to `None` if you want to interact with the current scene in the Unity Editor. -Before building the environment or interacting with it in the editor, select `Ball3DAcademy` in the **Hierarchy** window of the Unity editor and make sure `Control` checkbox is checked under `Ball 3D Academy` component. - More information and documentation is provided in the [Python API](Python-API.md) page. -## Training the Brain with Reinforcement Learning +## Training the Model with Reinforcement Learning ### Setting up the environment for training -To set up the environment for training, you will need to specify which agents are contributing -to the training and which Brain is being trained. You can only perform training with -a `Learning Brain`. - -1. Each platform agent needs an assigned `Learning Brain`. In this example, each platform agent was created using a prefab. To update all of the brains in each platform agent at once, you only need to update the platform agent prefab. In the **Project** window, go to the `Assets/ML-Agents/Examples/3DBall/Prefabs` folder. You should see the `3DBall` prefab in the **Inspector** window. Now expand the prefab and select `Agent` which should appear now on your **Inspector** window. 
In the **Project** window, drag the **3DBallLearning** Brain located in `Assets/ML-Agents/Examples/3DBall/Brains` into the `Brain` property under `Ball 3D Agent (Script)` component in the **Inspector** window. - - **Note**: The Unity prefab system will modify all instances of the agent properties in your scene. If the agent does not synchronize automatically with the prefab, you can hit the Revert button in the top of the **Inspector** window. - -2. In the **Hierarchy** window, select `Ball3DAcademy`. -3. In the **Project** window, go to `Assets/ML-Agents/Examples/3DBall/Brains` folder and drag the **3DBallLearning** Brain to the `Brains` property under `Braodcast Hub` in the `Ball3DAcademy` object in the **Inspector** window. In order to train, make sure the `Control` checkbox is selected. - - **Note:** Assigning a Brain to an agent (dragging a Brain into the `Brain` property of -the agent) means that the Brain will be making decision for that agent. Whereas dragging -a Brain into the Broadcast Hub means that the Brain will be exposed to the Python process. -The `Control` checkbox means that in addition to being exposed to Python, the Brain will -be controlled by the Python process (required for training). - - ![Set Brain to External](images/mlagents-SetBrainToTrain.png) +In order to setup the Agents for Training, you will need to edit the +`Behavior Name` under `BehaviorParamters` in the Agent Inspector window. +The `Behavior Name` is used to group agents per behaviors. Note that Agents +sharing the same `Behavior Name` must be agents of the same type using the +same `Behavior Parameters`. You can make sure all your agents have the same +`Behavior Parameters` using Prefabs. +The `Behavior Name` corresponds to the name of the model that will be +generated by the training process and is used to select the hyperparameters +from the training configuration file. 
### Training the environment @@ -224,25 +207,24 @@ INFO:mlagents.trainers: first-run-0: 3DBallLearning: Step: 10000. Mean Reward: 2 ### After training You can press Ctrl+C to stop the training, and your trained model will be at -`models//.nn` where -`` is the name of the Brain corresponding to the model. +`models//.nn` where +`` is the name of the `Behavior Name` of the agents corresponding to the model. (**Note:** There is a known bug on Windows that causes the saving of the model to fail when you early terminate the training, it's recommended to wait until Step has reached the max_steps parameter you set in trainer_config.yaml.) This file corresponds to your model's latest checkpoint. You can now embed this trained -model into your Learning Brain by following the steps below, which is similar to +model into your Agents by following the steps below, which is similar to the steps described [above](#running-a-pre-trained-model). 1. Move your model file into `UnitySDK/Assets/ML-Agents/Examples/3DBall/TFModels/`. 2. Open the Unity Editor, and select the **3DBall** scene as described above. -3. Select the **3DBallLearning** Learning Brain from the Scene hierarchy. -4. Drag the `.nn` file from the Project window of - the Editor to the **Model** placeholder in the **3DBallLearning** +3. Select the **3DBall** prefab Agent object. +4. Drag the `.nn` file from the Project window of + the Editor to the **Model** placeholder in the **Ball3DAgent** inspector window. -5. Select Ball3DAcademy in the scene and toggle off Control, each platform's brain now regains control. -6. Press the :arrow_forward: button at the top of the Editor. +5. Press the :arrow_forward: button at the top of the Editor. 
## Next Steps diff --git a/docs/Creating-Custom-Protobuf-Messages.md b/docs/Creating-Custom-Protobuf-Messages.md index 3a0de7c30f..2b22acef5a 100644 --- a/docs/Creating-Custom-Protobuf-Messages.md +++ b/docs/Creating-Custom-Protobuf-Messages.md @@ -3,7 +3,7 @@ # Creating Custom Protobuf Messages -Unity and Python communicate by sending protobuf messages to and from each other. You can create custom protobuf messages if you want to exchange structured data beyond what is included by default. +Unity and Python communicate by sending protobuf messages to and from each other. You can create custom protobuf messages if you want to exchange structured data beyond what is included by default. ## Implementing a Custom Message @@ -17,7 +17,7 @@ There are three custom message types currently supported - Custom Actions, Custo By default, the Python API sends actions to Unity in the form of a floating point list and an optional string-valued text action for each agent. -You can define a custom action type, to either replace or augment the default, by adding fields to the `CustomAction` message, which you can do by editing the file `protobuf-definitions/proto/mlagents/envs/communicator_objects/custom_action.proto`. +You can define a custom action type, to either replace or augment the default, by adding fields to the `CustomAction` message, which you can do by editing the file `protobuf-definitions/proto/mlagents/envs/communicator_objects/custom_action.proto`. Instances of custom actions are set via the `custom_action` parameter of the `env.step`. An agent receives a custom action by defining a method with the signature: @@ -25,7 +25,7 @@ Instances of custom actions are set via the `custom_action` parameter of the `en public virtual void AgentAction(float[] vectorAction, string textAction, CommunicatorObjects.CustomAction customAction) ``` -Below is an example of creating a custom action that instructs an agent to choose a cardinal direction to walk in and how far to walk. 
+Below is an example of creating a custom action that instructs an agent to choose a cardinal direction to walk in and how far to walk. The `custom_action.proto` file looks like: @@ -42,7 +42,7 @@ message CustomAction { EAST=2; WEST=3; } - float walkAmount = 1; + float walkAmount = 1; Direction direction = 2; } ``` @@ -81,7 +81,7 @@ Keep in mind that the protobuffer compiler automatically configures the capitali ### Custom Reset Parameters -By default, you can configure an environment `env` in the Python API by specifying a `config` parameter that is a dictionary mapping strings to floats. +By default, you can configure an environment `env` in the Python API by specifying a `config` parameter that is a dictionary mapping strings to floats. You can also configure the environment reset using a custom protobuf message. To do this, add fields to the `CustomResetParameters` protobuf message in `custom_reset_parameters.proto`, analogously to `CustomAction` above. Then pass an instance of the message to `env.reset` via the `custom_reset_parameters` keyword parameter. @@ -140,9 +140,9 @@ public class MyAcademy : Academy ### Custom Observations -By default, Unity returns observations to Python in the form of a floating-point vector. +By default, Unity returns observations to Python in the form of a floating-point vector. -You can define a custom observation message to supplement that. To do so, add fields to the `CustomObservation` protobuf message in `custom_observation.proto`. +You can define a custom observation message to supplement that. To do so, add fields to the `CustomObservation` protobuf message in `custom_observation.proto`. Then in your agent, create an instance of a custom observation via `new CommunicatorObjects.CustomObservation`. Then in `CollectObservations`, call `SetCustomObservation` with the custom observation instance as the parameter. 
@@ -156,7 +156,7 @@ class MyAgent : Agent { var obs = new CustomObservation(); obs.CustomField = 1.0; SetCustomObservation(obs); - } + } } ``` @@ -165,7 +165,7 @@ In Python, the custom field would be accessed like: ```python ... result = env.step(...) -result[brain_name].custom_observations[0].customField +result[behavior_name].custom_observations[0].customField ``` -where `brain_name` is the name of the brain attached to the agent. +where `behavior_name` is the `Behavior Name` property of the Agent. diff --git a/docs/FAQ.md b/docs/FAQ.md index 9552abbc9f..2b4457437b 100644 --- a/docs/FAQ.md +++ b/docs/FAQ.md @@ -44,12 +44,7 @@ UnityAgentsException: The Communicator was unable to connect. Please make sure t There may be a number of possible causes: -* _Cause_: There may be no LearningBrain with `Control` option checked in the - `Broadcast Hub` of the Academy. In this case, the environment will not attempt - to communicate with Python. _Solution_: Click `Add New` in your Academy's - `Broadcast Hub`, and drag your LearningBrain asset into the `Brains` field, - and check the `Control` toggle. Also you need to assign this LearningBrain - asset to all of the Agents you wish to do training on. +* _Cause_: There may be no agent in the scene. _Solution_: Make sure there is at least one Agent in the scene. * _Cause_: On OSX, the firewall may be preventing communication with the environment. _Solution_: Add the built environment binary to the list of exceptions on the firewall by following diff --git a/docs/Feature-Memory.md b/docs/Feature-Memory.md index 6147e39cf6..d35d49af40 100644 --- a/docs/Feature-Memory.md +++ b/docs/Feature-Memory.md @@ -9,7 +9,7 @@ It is now possible to give memories to your agents. When training, the agents will be able to store a vector of floats to be used next time they need to make a decision. 
-![Brain Inspector](images/ml-agents-LSTM.png) +![Inspector](images/ml-agents-LSTM.png) Deciding what the agents should remember in order to solve a task is not easy to do by hand, but our training algorithms can learn to keep track of what is @@ -19,7 +19,7 @@ important to remember with ## How to use When configuring the trainer parameters in the `config/trainer_config.yaml` -file, add the following parameters to the Brain you want to use. +file, add the following parameters to the Behavior you want to use. ```json use_recurrent: true diff --git a/docs/Getting-Started-with-Balance-Ball.md b/docs/Getting-Started-with-Balance-Ball.md index 76db9af813..f45fd09104 100644 --- a/docs/Getting-Started-with-Balance-Ball.md +++ b/docs/Getting-Started-with-Balance-Ball.md @@ -32,7 +32,7 @@ and Unity, see the [installation instructions](Installation.md). An agent is an autonomous actor that observes and interacts with an _environment_. In the context of Unity, an environment is a scene containing an -Academy and one or more Brain and Agent objects, and, of course, the other +Academy and one or more Agent objects, and, of course, the other entities that an agent interacts with. ![Unity Editor](images/mlagents-3DBallHierarchy.png) @@ -45,7 +45,7 @@ window. The Inspector shows every component on a GameObject. The first thing you may notice after opening the 3D Balance Ball scene is that it contains not one, but several agent cubes. Each agent cube in the scene is an -independent agent, but they all share the same Brain. 3D Balance Ball does this +independent agent, but they all share the same Behavior. 3D Balance Ball does this to speed up training since all twelve agents contribute to training in parallel. ### Academy @@ -53,10 +53,6 @@ to speed up training since all twelve agents contribute to training in parallel. The Academy object for the scene is placed on the Ball3DAcademy GameObject. 
When you look at an Academy component in the inspector, you can see several properties that control how the environment works. -The **Broadcast Hub** keeps track of which Brains will send data during training. -If a Brain is added to the hub, the data from this Brain will be sent to the external training -process. If the `Control` checkbox is checked, the training process will be able to -control and train the agents linked to the Brain. The **Training Configuration** and **Inference Configuration** properties set the graphics and timescale properties for the Unity application. The Academy uses the **Training Configuration** during training and the @@ -86,57 +82,6 @@ The 3D Balance Ball environment does not use these functions — each Agent rese itself when needed — but many environments do use these functions to control the environment around the Agents. -### Brain - -As of v0.6, a Brain is a Unity asset and exists within the `UnitySDK` folder. These brains (ex. **3DBallLearning.asset**) are loaded into each Agent object (ex. **Ball3DAgents**). A Brain doesn't store any information about an Agent, it just -routes the Agent's collected observations to the decision making process and -returns the chosen action to the Agent. All Agents can share the same -Brain, but would act independently. The Brain settings tell you quite a bit about how -an Agent works. - -You can create new Brain assets by selecting `Assets -> -Create -> ML-Agents -> Brain`. There are 3 types of Brains. -The **Learning Brain** is a Brain that uses a trained neural network to make decisions. -When the `Control` box is checked in the Brains property under the **Broadcast Hub** in the Academy, the external process that is training the neural network will take over decision making for the agents -and ultimately generate a trained neural network. You can also use the -**Learning Brain** with a pre-trained model. 
-The **Heuristic** Brain allows you to hand-code the Agent logic by extending -the Decision class. -Finally, the **Player** Brain lets you map keyboard commands to actions, which -can be useful when testing your agents and environment. You can also implement your own type of Brain. - -In this tutorial, you will use the **Learning Brain** for training. - -#### Vector Observation Space - -Before making a decision, an agent collects its observation about its state in -the world. The vector observation is a vector of floating point numbers which -contain relevant information for the agent to make decisions. - -The Brain instance used in the 3D Balance Ball example uses the **Continuous** -vector observation space with a **State Size** of 8. This means that the feature -vector containing the Agent's observations contains eight elements: the `x` and -`z` components of the agent cube's rotation and the `x`, `y`, and `z` components -of the ball's relative position and velocity. (The observation values are -defined in the Agent's `CollectObservations()` function.) - -#### Vector Action Space - -An Agent is given instructions from the Brain in the form of *actions*. -ML-Agents toolkit classifies actions into two types: the **Continuous** vector -action space is a vector of numbers that can vary continuously. What each -element of the vector means is defined by the Agent logic (the PPO training -process just learns what values are better given particular state observations -based on the rewards received when it tries different values). For example, an -element might represent a force or torque applied to a `Rigidbody` in the Agent. -The **Discrete** action vector space defines its actions as tables. An action -given to the Agent is an array of indices into tables. - -The 3D Balance Ball example is programmed to use both types of vector action -space. You can try training with both settings to observe whether there is a -difference. 
(Set the `Vector Action Space Size` to 4 when using the discrete -action space and 2 when using continuous.) - ### Agent The Agent is the actor that observes and takes actions in the environment. In @@ -144,11 +89,9 @@ the 3D Balance Ball environment, the Agent components are placed on the twelve "Agent" GameObjects. The base Agent object has a few properties that affect its behavior: -* **Brain** — Every Agent must have a Brain. The Brain determines how an Agent - makes decisions. All the Agents in the 3D Balance Ball scene share the same - Brain. -* **Visual Observations** — Defines any Camera objects used by the Agent to - observe its environment. 3D Balance Ball does not use camera observations. +* **Behavior Parameters** — Every Agent must have a Behavior. The Behavior + determines how an Agent makes decisions. More on Behavior Parameters in + the next section. * **Max Step** — Defines how many simulation steps can occur before the Agent decides it is done. In 3D Balance Ball, an Agent restarts after 5000 steps. * **Reset On Done** — Defines whether an Agent starts over when it is finished. @@ -165,22 +108,54 @@ The Ball3DAgent subclass defines the following methods: training generalizes to more than a specific starting position and agent cube attitude. * agent.CollectObservations() — Called every simulation step. Responsible for - collecting the Agent's observations of the environment. Since the Brain - instance assigned to the Agent is set to the continuous vector observation + collecting the Agent's observations of the environment. Since the Behavior + Parameters of the Agent are set with vector observation space with a state size of 8, the `CollectObservations()` must call - `AddVectorObs` such that vector size adds up to 8. + `AddVectorObs` such that vector size adds up to 8. * agent.AgentAction() — Called every simulation step. Receives the action chosen - by the Brain. 
The Ball3DAgent example handles both the continuous and the - discrete action space types. There isn't actually much difference between the - two state types in this environment — both vector action spaces result in a + by the Agent. The vector action spaces result in a small change in the agent cube's rotation at each step. The `AgentAction()` function assigns a reward to the Agent; in this example, an Agent receives a small positive reward for each step it keeps the ball on the agent cube's head and a larger, negative reward for dropping the ball. An Agent is also marked as done when it drops the ball so that it will reset with a new ball for the next simulation step. +* agent.Heuristic() - When the `Use Heuristic` checkbox is checked in the Behavior + Parameters of the Agent, the Agent will use the `Heuristic()` method to generate + the actions of the Agent. As such, the `Heuristic()` method returns an array of + floats. In the case of the Ball 3D Agent, the `Heuristic()` method converts the + keyboard inputs into actions. + + +#### Behavior Parameters : Vector Observation Space + +Before making a decision, an agent collects its observation about its state in +the world. The vector observation is a vector of floating point numbers which +contain relevant information for the agent to make decisions. + +The Behavior Parameters of the 3D Balance Ball example uses a **Space Size** of 8. +This means that the feature +vector containing the Agent's observations contains eight elements: the `x` and +`z` components of the agent cube's rotation and the `x`, `y`, and `z` components +of the ball's relative position and velocity. (The observation values are +defined in the Agent's `CollectObservations()` function.) + +#### Behavior Parameters : Vector Action Space + +An Agent is given instructions in the form of a float array of *actions*. 
+ML-Agents toolkit classifies actions into two types: the **Continuous** vector +action space is a vector of numbers that can vary continuously. What each +element of the vector means is defined by the Agent logic (the training +process just learns what values are better given particular state observations +based on the rewards received when it tries different values). For example, an +element might represent a force or torque applied to a `Rigidbody` in the Agent. +The **Discrete** action vector space defines its actions as tables. An action +given to the Agent is an array of indices into tables. + +The 3D Balance Ball example is programmed to use continuous action +space with `Space Size` of 2. -## Training the Brain with Reinforcement Learning +## Training with Reinforcement Learning Now that we have an environment, we can perform the training. @@ -275,11 +250,11 @@ From TensorBoard, you will see the summary statistics: ![Example TensorBoard Run](images/mlagents-TensorBoard.png) -## Embedding the Trained Brain into the Unity Environment (Experimental) +## Embedding the Model into the Unity Environment Once the training process completes, and the training process saves the model (denoted by the `Saved Model` message) you can add it to the Unity project and -use it with Agents having a **Learning Brain**. +use it with compatible Agents (the Agents that generated the model). __Note:__ Do not just close the Unity Window once the `Saved Model` message appears. Either wait for the training process to close the window or press Ctrl+C at the command-line prompt. If you close the window manually, the `.nn` file @@ -288,6 +263,6 @@ containing the trained model is not exported into the ml-agents folder. 
### Embedding the trained model into Unity To embed the trained model into Unity, follow the later part of [Training the -Brain with Reinforcement -Learning](Basic-Guide.md#training-the-brain-with-reinforcement-learning) section +Model with Reinforcement +Learning](Basic-Guide.md#training-the-model-with-reinforcement-learning) section of the Basic Guide page. diff --git a/docs/Glossary.md b/docs/Glossary.md index 1a3954a4c8..055f470f52 100644 --- a/docs/Glossary.md +++ b/docs/Glossary.md @@ -6,13 +6,13 @@ environment. * **Agent** - Unity Component which produces observations and takes actions in the environment. Agents actions are determined by decisions produced by a - linked Brain. -* **Brain** - Unity Asset which makes decisions for the agents linked to it. -* **Decision** - The specification produced by a Brain for an action to be + Policy. +* **Policy** - The decision making mechanism, typically a neural network model. +* **Decision** - The specification produced by a Policy for an action to be carried out given an observation. * **Editor** - The Unity Editor, which may include any pane (e.g. Hierarchy, Scene, Inspector). -* **Environment** - The Unity scene which contains Agents, Academy, and Brains. +* **Environment** - The Unity scene which contains Agents and the Academy. * **FixedUpdate** - Unity method called each time the game engine is stepped. ML-Agents logic should be placed here. * **Frame** - An instance of rendering the main camera for the display. @@ -30,5 +30,5 @@ logic should not be placed here. * **External Coordinator** - ML-Agents class responsible for communication with outside processes (in this case, the Python API). -* **Trainer** - Python class which is responsible for training a given - Brain. Contains TensorFlow graph which makes decisions for Learning Brain. +* **Trainer** - Python class which is responsible for training a given + group of Agents. 
diff --git a/docs/Installation-Windows.md b/docs/Installation-Windows.md deleted file mode 100644 index f4454aa48b..0000000000 --- a/docs/Installation-Windows.md +++ /dev/null @@ -1,351 +0,0 @@ -# Installing ML-Agents Toolkit for Windows - -The ML-Agents toolkit supports Windows 10. While it might be possible to run the -ML-Agents toolkit using other versions of Windows, it has not been tested on -other versions. Furthermore, the ML-Agents toolkit has not been tested on a -Windows VM such as Bootcamp or Parallels. - -To use the ML-Agents toolkit, you install Python and the required Python -packages as outlined below. This guide also covers how set up GPU-based training -(for advanced users). GPU-based training is not currently required for the -ML-Agents toolkit. However, training on a GPU might be required by future -versions and features. - -## Step 1: Install Python via Anaconda - -[Download](https://www.anaconda.com/download/#windows) and install Anaconda for -Windows. By using Anaconda, you can manage separate environments for different -distributions of Python. Python 3.6.1 or higher is required as we no longer support -Python 2. In this guide, we are using Python version 3.6 and Anaconda version -5.1 -([64-bit](https://repo.continuum.io/archive/Anaconda3-5.1.0-Windows-x86_64.exe) -or [32-bit](https://repo.continuum.io/archive/Anaconda3-5.1.0-Windows-x86.exe) -direct links). - -

- Anaconda Install -

- -We recommend the default _advanced installation options_. However, select the -options appropriate for your specific situation. - -

- Anaconda Install -

- -After installation, you must open __Anaconda Navigator__ to finish the setup. -From the Windows search bar, type _anaconda navigator_. You can close Anaconda -Navigator after it opens. - -If environment variables were not created, you will see error "conda is not -recognized as internal or external command" when you type `conda` into the -command line. To solve this you will need to set the environment variable -correctly. - -Type `environment variables` in the search bar (this can be reached by hitting -the Windows key or the bottom left Windows button). You should see an option -called __Edit the system environment variables__. - -

- edit env variables -

- -From here, click the __Environment Variables__ button. Double click "Path" under -__System variable__ to edit the "Path" variable, click __New__ to add the -following new paths. - -```console -%UserProfile%\Anaconda3\Scripts -%UserProfile%\Anaconda3\Scripts\conda.exe -%UserProfile%\Anaconda3 -%UserProfile%\Anaconda3\python.exe -``` - -## Step 2: Setup and Activate a New Conda Environment - -You will create a new [Conda environment](https://conda.io/docs/) to be used -with the ML-Agents toolkit. This means that all the packages that you install -are localized to just this environment. It will not affect any other -installation of Python or other environments. Whenever you want to run -ML-Agents, you will need activate this Conda environment. - -To create a new Conda environment, open a new Anaconda Prompt (_Anaconda Prompt_ -in the search bar) and type in the following command: - -```sh -conda create -n ml-agents python=3.6 -``` - -You may be asked to install new packages. Type `y` and press enter _(make sure -you are connected to the Internet)_. You must install these required packages. -The new Conda environment is called ml-agents and uses Python version 3.6. - -

- Anaconda Install -

- -To use this environment, you must activate it. _(To use this environment In the -future, you can run the same command)_. In the same Anaconda Prompt, type in the -following command: - -```sh -activate ml-agents -``` - -You should see `(ml-agents)` prepended on the last line. - -Next, install `tensorflow`. Install this package using `pip` - which is a -package management system used to install Python packages. Latest versions of -TensorFlow won't work, so you will need to make sure that you install version -1.7.1. In the same Anaconda Prompt, type in the following command _(make sure -you are connected to the Internet)_: - -```sh -pip install tensorflow==1.7.1 -``` - -## Step 3: Install Required Python Packages - -The ML-Agents toolkit depends on a number of Python packages. Use `pip` to -install these Python dependencies. - -If you haven't already, clone the ML-Agents Toolkit Github repository to your -local computer. You can do this using Git ([download -here](https://git-scm.com/download/win)) and running the following commands in -an Anaconda Prompt _(if you open a new prompt, be sure to activate the ml-agents -Conda environment by typing `activate ml-agents`)_: - -```sh -git clone https://github.com/Unity-Technologies/ml-agents.git -``` - -If you don't want to use Git, you can always directly download all the files -[here](https://github.com/Unity-Technologies/ml-agents/archive/master.zip). - -The `UnitySDK` subdirectory contains the Unity Assets to add to your projects. -It also contains many [example environments](Learning-Environment-Examples.md) -to help you get started. - -The `ml-agents` subdirectory contains a Python package which provides deep reinforcement -learning trainers to use with Unity environments. - -The `ml-agents-envs` subdirectory contains a Python API to interface with Unity, which -the `ml-agents` package depends on. - -The `gym-unity` subdirectory contains a package to interface with OpenAI Gym. 
- -Keep in mind where the files were downloaded, as you will need the -trainer config files in this directory when running `mlagents-learn`. -Make sure you are connected to the Internet and then type in the Anaconda -Prompt: - -```console -pip install mlagents -``` - -This will complete the installation of all the required Python packages to run -the ML-Agents toolkit. - -Sometimes on Windows, when you use pip to install certain Python packages, the pip will get stuck when trying to read the cache of the package. If you see this, you can try: - -```console -pip install mlagents --no-cache-dir -``` - -This `--no-cache-dir` tells the pip to disable the cache. - -### Installing for Development - -If you intend to make modifications to `ml-agents` or `ml-agents-envs`, you should install -the packages from the cloned repo rather than from PyPi. To do this, you will need to install - `ml-agents` and `ml-agents-envs` separately. - -In our example, the files are located in `C:\Downloads`. After you have either -cloned or downloaded the files, from the Anaconda Prompt, change to the ml-agents -subdirectory inside the ml-agents directory: - -```console -cd C:\Downloads\ml-agents -``` - -From the repo's main directory, now run: - -```console -cd ml-agents-envs -pip install -e . -cd .. -cd ml-agents -pip install -e . -``` - -Running pip with the `-e` flag will let you make changes to the Python files directly and have those -reflected when you run `mlagents-learn`. It is important to install these packages in this order as the -`mlagents` package depends on `mlagents_envs`, and installing it in the other -order will download `mlagents_envs` from PyPi. - -## (Optional) Step 4: GPU Training using The ML-Agents Toolkit - -GPU is not required for the ML-Agents toolkit and won't speed up the PPO -algorithm a lot during training(but something in the future will benefit from -GPU). This is a guide for advanced users who want to train using GPUs. 
-Additionally, you will need to check if your GPU is CUDA compatible. Please -check Nvidia's page [here](https://developer.nvidia.com/cuda-gpus). - -Currently for the ML-Agents toolkit, only CUDA v9.0 and cuDNN v7.0.5 is supported. - -### Install Nvidia CUDA toolkit - -[Download](https://developer.nvidia.com/cuda-toolkit-archive) and install the -CUDA toolkit 9.0 from Nvidia's archive. The toolkit includes GPU-accelerated -libraries, debugging and optimization tools, a C/C++ (Step Visual Studio 2017) -compiler and a runtime library and is needed to run the ML-Agents toolkit. In -this guide, we are using version -[9.0.176](https://developer.nvidia.com/compute/cuda/9.0/Prod/network_installers/cuda_9.0.176_win10_network-exe)). - -Before installing, please make sure you __close any running instances of Unity -or Visual Studio__. - -Run the installer and select the Express option. Note the directory where you -installed the CUDA toolkit. In this guide, we installed in the directory -`C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v9.0` - -### Install Nvidia cuDNN library - -[Download](https://developer.nvidia.com/cudnn) and install the cuDNN library -from Nvidia. cuDNN is a GPU-accelerated library of primitives for deep neural -networks. Before you can download, you will need to sign up for free to the -Nvidia Developer Program. - -

- cuDNN membership required -

- -Once you've signed up, go back to the cuDNN -[downloads page](https://developer.nvidia.com/cudnn). -You may or may not be asked to fill out a short survey. When you get to the list -cuDNN releases, __make sure you are downloading the right version for the CUDA -toolkit you installed in Step 1.__ In this guide, we are using version 7.0.5 for -CUDA toolkit version 9.0 -([direct link](https://developer.nvidia.com/compute/machine-learning/cudnn/secure/v7.0.5/prod/9.0_20171129/cudnn-9.0-windows10-x64-v7)). - -After you have downloaded the cuDNN files, you will need to extract the files -into the CUDA toolkit directory. In the cuDNN zip file, there are three folders -called `bin`, `include`, and `lib`. - -

- cuDNN zip files -

- -Copy these three folders into the CUDA toolkit directory. The CUDA toolkit -directory is located at -`C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v9.0` - -

- cuda toolkit directory -

- -### Set Environment Variables - -You will need to add one environment variable and two path variables. - -To set the environment variable, type `environment variables` in the search bar -(this can be reached by hitting the Windows key or the bottom left Windows -button). You should see an option called __Edit the system environment -variables__. - -

- edit env variables -

- -From here, click the __Environment Variables__ button. Click __New__ to add a -new system variable _(make sure you do this under __System variables__ and not -User variables_. - -

- new system variable -

- -For __Variable Name__, enter `CUDA_HOME`. For the variable value, put the -directory location for the CUDA toolkit. In this guide, the directory location -is `C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v9.0`. Press __OK__ once. - -

- system variable names and values -

- -To set the two path variables, inside the same __Environment Variables__ window -and under the second box called __System Variables__, find a variable called -`Path` and click __Edit__. You will add two directories to the list. For this -guide, the two entries would look like: - -```console -C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v9.0\lib\x64 -C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v9.0\extras\CUPTI\libx64 -``` - -Make sure to replace the relevant directory location with the one you have -installed. _Please note that case sensitivity matters_. - -

- Path variables -

- -### Install TensorFlow GPU - -Next, install `tensorflow-gpu` using `pip`. You'll need version 1.7.1. In an -Anaconda Prompt with the Conda environment ml-agents activated, type in the -following command to uninstall TensorFlow for cpu and install TensorFlow -for gpu _(make sure you are connected to the Internet)_: - -```sh -pip uninstall tensorflow -pip install tensorflow-gpu==1.7.1 -``` - -Lastly, you should test to see if everything installed properly and that -TensorFlow can identify your GPU. In the same Anaconda Prompt, open Python -in the Prompt by calling: - -```sh -python -``` - -And then type the following commands: - -```python -import tensorflow as tf - -sess = tf.Session(config=tf.ConfigProto(log_device_placement=True)) -``` - -You should see something similar to: - -```console -Found device 0 with properties ... -``` - -## Acknowledgments - -We would like to thank -[Jason Weimann](https://unity3d.college/2017/10/25/machine-learning-in-unity3d-setting-up-the-environment-tensorflow-for-agentml-on-windows-10/) -and -[Nitish S. Mutha](http://blog.nitishmutha.com/tensorflow/2017/01/22/TensorFlow-with-gpu-for-windows.html) -for writing the original articles which were used to create this guide. diff --git a/docs/Installation.md b/docs/Installation.md index 29a3213e29..ac5fcdff67 100644 --- a/docs/Installation.md +++ b/docs/Installation.md @@ -16,12 +16,10 @@ Build Support_ component when installing Unity. width="500" border="10" />

-## Windows Users -For setting up your environment on Windows, we have created a [detailed -guide](Installation-Windows.md) to setting up your env. For Mac and Linux, -continue with this guide. - -## Mac and Unix Users +## Environment Setup +We now support a single mechanism for installing ML-Agents on Mac/Windows/Linux using Virtual +Environments. For more information on Virtual Environments and installation instructions, +follow this [guide](Using-Virtual-Environment.md). ### Clone the ML-Agents Toolkit Repository @@ -35,17 +33,17 @@ The `UnitySDK` subdirectory contains the Unity Assets to add to your projects. It also contains many [example environments](Learning-Environment-Examples.md) to help you get started. -The `ml-agents` subdirectory contains a Python package which provides deep reinforcement +The `ml-agents` subdirectory contains a Python package which provides deep reinforcement learning trainers to use with Unity environments. The `ml-agents-envs` subdirectory contains a Python API to interface with Unity, which -the `ml-agents` package depends on. +the `ml-agents` package depends on. The `gym-unity` subdirectory contains a package to interface with OpenAI Gym. ### Install Python and mlagents Package -In order to use ML-Agents toolkit, you need Python 3.6.1 or higher. +In order to use ML-Agents toolkit, you need Python 3.6.1 or higher. [Download](https://www.python.org/downloads/) and install the latest version of Python if you do not already have it. If your Python environment doesn't include `pip3`, see these @@ -58,10 +56,10 @@ To install the `mlagents` Python package, run from the command line: pip3 install mlagents ``` -Note that this will install `ml-agents` from PyPi, _not_ from the cloned repo. +Note that this will install `ml-agents` from PyPi, _not_ from the cloned repo. 
If you installed this correctly, you should be able to run `mlagents-learn --help`, after which you will see the Unity logo and the command line -parameters you can use with `mlagents-learn`. +parameters you can use with `mlagents-learn`. By installing the `mlagents` package, the dependencies listed in the [setup.py file](../ml-agents/setup.py) are also installed. Some of the primary dependencies include: @@ -79,7 +77,7 @@ Some of the primary dependencies include: ### Installing for Development -If you intend to make modifications to `ml-agents` or `ml-agents-envs`, you should install +If you intend to make modifications to `ml-agents` or `ml-agents-envs`, you should install the packages from the cloned repo rather than from PyPi. To do this, you will need to install `ml-agents` and `ml-agents-envs` separately. From the repo's root directory, run: @@ -93,13 +91,8 @@ pip3 install -e ./ Running pip with the `-e` flag will let you make changes to the Python files directly and have those reflected when you run `mlagents-learn`. It is important to install these packages in this order as the -`mlagents` package depends on `mlagents_envs`, and installing it in the other -order will download `mlagents_envs` from PyPi. - -## Docker-based Installation - -If you'd like to use Docker for ML-Agents, please follow -[this guide](Using-Docker.md). +`mlagents` package depends on `mlagents_envs`, and installing it in the other +order will download `mlagents_envs` from PyPi. ## Next Steps diff --git a/docs/Learning-Environment-Best-Practices.md b/docs/Learning-Environment-Best-Practices.md index 09c90f54bb..95e52b33ab 100644 --- a/docs/Learning-Environment-Best-Practices.md +++ b/docs/Learning-Environment-Best-Practices.md @@ -8,9 +8,10 @@ lessons which progressively increase in difficulty are presented to the agent ([learn more here](Training-Curriculum-Learning.md)). 
* When possible, it is often helpful to ensure that you can complete the task by - using a Player Brain to control the agent. -* It is often helpful to make many copies of the agent, and attach the Brain to - be trained to all of these agents. In this way the Brain can get more feedback + using a heuristic to control the agent. To do so, check the `Use Heuristic` + checkbox on the Agent and implement the `Heuristic()` method on the Agent. +* It is often helpful to make many copies of the agent, and give them the same + `Behavior Name`. In this way the learning process can get more feedback information from all of these agents, which helps it train faster. ## Rewards diff --git a/docs/Learning-Environment-Create-New.md b/docs/Learning-Environment-Create-New.md index 4eb497dd8f..1fa2f297da 100644 --- a/docs/Learning-Environment-Create-New.md +++ b/docs/Learning-Environment-Create-New.md @@ -21,17 +21,13 @@ steps: containing the environment. Your Academy class can implement a few optional methods to update the scene independently of any agents. For example, you can add, move, or delete agents and other entities in the environment. -3. Create one or more Brain assets by clicking **Assets** > **Create** > - **ML-Agents** > **Brain**, and naming them appropriately. -4. Implement your Agent subclasses. An Agent subclass defines the code an Agent +3. Implement your Agent subclasses. An Agent subclass defines the code an Agent uses to observe its environment, to carry out assigned actions, and to calculate the rewards used for reinforcement training. You can also implement optional methods to reset the Agent when it has finished or failed its task. -5. Add your Agent subclasses to appropriate GameObjects, typically, the object +4. Add your Agent subclasses to appropriate GameObjects, typically, the object in the scene that represents the Agent in the simulation. Each Agent object must be assigned a Brain object. -6. 
If training, check the `Control` checkbox in the BroadcastHub of the Academy. - [run the training process](Training-ML-Agents.md). **Note:** If you are unfamiliar with Unity, refer to [Learning the interface](https://docs.unity3d.com/Manual/LearningtheInterface.html) @@ -46,12 +42,12 @@ The first task to accomplish is simply creating a new Unity project and importing the ML-Agents assets into it: 1. Launch the Unity Editor and create a new project named "RollerBall". -2. Make sure that the Scripting Runtime Version for the project is set to use - **.NET 4.x Equivalent** (This is an experimental option in Unity 2017, +2. Make sure that the Scripting Runtime Version for the project is set to use + **.NET 4.x Equivalent** (This is an experimental option in Unity 2017, but is the default as of 2018.3.) 3. In a file system window, navigate to the folder containing your cloned ML-Agents repository. -4. Drag the `ML-Agents` and `Gizmos` folders from `UnitySDK/Assets` to the Unity +4. Drag the `ML-Agents` folder from `UnitySDK/Assets` to the Unity Editor Project window. Your Unity **Project** window should contain the following assets: @@ -138,7 +134,7 @@ Next, edit the new `RollerAcademy` script: 1. In the Unity Project window, double-click the `RollerAcademy` script to open it in your code editor. (By default new scripts are placed directly in the **Assets** folder.) -2. In the code editor, add the statement, `using MLAgents;`. +2. In the code editor, add the statement, `using MLAgents;`. 3. Change the base class from `MonoBehaviour` to `Academy`. 4. Delete the `Start()` and `Update()` methods that were added by default. @@ -154,29 +150,10 @@ public class RollerAcademy : Academy { } The default settings for the Academy properties are also fine for this environment, so we don't need to change anything for the RollerAcademy component -in the Inspector window. You may not have the RollerBrain in the Broadcast Hub yet, -more on that later. +in the Inspector window. 
![The Academy properties](images/mlagents-NewTutAcademy.png) -## Add Brain Assets - -The Brain object encapsulates the decision making process. An Agent sends its -observations to its Brain and expects a decision in return. The type of the Brain -(Learning, Heuristic or Player) determines how the Brain makes decisions. -To create the Brain: - -1. Go to **Assets** > **Create** > **ML-Agents** and select the type of Brain asset - you want to create. For this tutorial, create a **Learning Brain** and - a **Player Brain**. -2. Name them `RollerBallBrain` and `RollerBallPlayer` respectively. - -![Creating a Brain Asset](images/mlagents-NewTutBrain.png) - -We will come back to the Brain properties later, but leave the Model property -of the `RollerBallBrain` as `None` for now. We will need to first train a -model before we can add it to the **Learning Brain**. - ## Implement an Agent To create the Agent: @@ -191,7 +168,7 @@ Then, edit the new `RollerAgent` script: 1. In the Unity Project window, double-click the `RollerAgent` script to open it in your code editor. -2. In the editor, add the `using MLAgents;` statement and then change the base +2. In the editor, add the `using MLAgents;` statement and then change the base class from `MonoBehaviour` to `Agent`. 3. Delete the `Update()` method, but we will use the `Start()` function, so leave it alone for now. @@ -218,7 +195,7 @@ stores a GameObject's position, orientation and scale in the 3D world). To get this reference, add a public field of type `Transform` to the RollerAgent class. Public fields of a component in Unity get displayed in the Inspector window, allowing you to choose which GameObject to use as the target in the Unity -Editor. +Editor. To reset the Agent's velocity (and later to apply force to move the agent) we need a reference to the Rigidbody component. A @@ -276,13 +253,13 @@ calculate an analytical solution to the problem. 
In our case, the information our Agent collects includes: -* Position of the target. +* Position of the target. ```csharp AddVectorObs(Target.position); ``` -* Position of the Agent itself. +* Position of the Agent itself. ```csharp AddVectorObs(this.transform.position); @@ -344,15 +321,15 @@ rBody.AddForce(controlSignal * speed); ### Rewards Reinforcement learning requires rewards. Assign rewards in the `AgentAction()` -function. The learning algorithm uses the rewards assigned to the Agent during +function. The learning algorithm uses the rewards assigned to the Agent during the simulation and learning process to determine whether it is giving the Agent the optimal actions. You want to reward an Agent for completing the -assigned task. In this case, the Agent is given a reward of 1.0 for reaching the +assigned task. In this case, the Agent is given a reward of 1.0 for reaching the Target cube. The RollerAgent calculates the distance to detect when it reaches the target. When it does, the code calls the `Agent.SetReward()` method to assign a -reward of 1.0 and marks the agent as finished by calling the `Done()` method +reward of 1.0 and marks the agent as finished by calling the `Done()` method on the Agent. ```csharp @@ -425,62 +402,51 @@ window. ## Final Editor Setup Now, that all the GameObjects and ML-Agent components are in place, it is time -to connect everything together in the Unity Editor. This involves assigning the -Brain asset to the Agent, changing some of the Agent Component's properties, and -setting the Brain properties so that they are compatible with our Agent code. +to connect everything together in the Unity Editor. This involves +changing some of the Agent Component's properties so that they are compatible +with our Agent code. -1. In the Academy Inspector, add the `RollerBallBrain` and `RollerBallPlayer` - Brains to the **Broadcast Hub**. -2. Select the **RollerAgent** GameObject to show its properties in the Inspector +1. 
Select the **RollerAgent** GameObject to show its properties in the Inspector window. -3. Drag the Brain **RollerBallPlayer** from the Project window to the - RollerAgent **Brain** field. -4. Change **Decision Interval** from `1` to `10`. -5. Drag the Target GameObject from the Hierarchy window to the RollerAgent +2. Change **Decision Interval** from `1` to `10`. +3. Drag the Target GameObject from the Hierarchy window to the RollerAgent Target field. +4. Modify the Behavior Parameters of the Agent : + * `Behavior Name` to *RollerBallBrain* + * `Vector Observation` `Space Size` = 8 + * `Vector Action` `Space Type` = **Continuous** + * `Vector Action` `Space Size` = 2 -![Assign the Brain to the RollerAgent](images/mlagents-NewTutAssignBrain.png) +Now you are ready to test the environment before training. -Finally, select the **RollerBallBrain** Asset in the **Project** window so that you can -see its properties in the Inspector window. Set the following properties: +## Testing the Environment -* `Vector Observation` `Space Size` = 8 -* `Vector Action` `Space Type` = **Continuous** -* `Vector Action` `Space Size` = 2 +It is always a good idea to test your environment manually before embarking on +an extended training run. To do so, you will need to implement the `Heuristic()` +method on the RollerAgent class. This will allow you control the Agent using +direct keyboard control. -Select the **RollerBallPlayer** Asset in the **Project** window and set the same -property values. +The `Heuristic()` method will look like this : -Now you are ready to test the environment before training. 
+```csharp
+    public override float[] Heuristic()
+    {
+        var action = new float[2];
+        action[0] = Input.GetAxis("Horizontal");
+        action[1] = Input.GetAxis("Vertical");
+        return action;
+    }
+```
 
-## Testing the Environment
+What this code means is that the heuristic will generate an action corresponding
+to the values of the "Horizontal" and "Vertical" input axis (which correspond to
+the keyboard arrow keys).
 
-It is always a good idea to test your environment manually before embarking on
-an extended training run. The reason we have created the `RollerBallPlayer` Brain
-is so that we can control the Agent using direct keyboard
-control. But first, you need to define the keyboard to action mapping. Although
-the RollerAgent only has an `Action Size` of two, we will use one key to specify
-positive values and one to specify negative values for each action, for a total
-of four keys.
-
-1. Select the `RollerBallPlayer` Asset to view its properties in the Inspector.
-2. Expand the **Key Continuous Player Actions** dictionary (only visible when using
-   a **PlayerBrain**).
-3. Set **Size** to 4.
-4. Set the following mappings:
-
-| Element | Key | Index | Value |
-| :------------ | :---: | :------: | :------: |
-| Element 0 | D | 0 | 1 |
-| Element 1 | A | 0 | -1 |
-| Element 2 | W | 1 | 1 |
-| Element 3 | S | 1 | -1 |
-
-The **Index** value corresponds to the index of the action array passed to
-`AgentAction()` function. **Value** is assigned to action[Index] when **Key** is
-pressed.
-
-Press **Play** to run the scene and use the WASD keys to move the Agent around
+In order for the Agent to use the Heuristic, you will need to check the `Use Heuristic`
+checkbox in the `Behavior Parameters` of the RollerAgent.
+
+
+Press **Play** to run the scene and use the arrow keys to move the Agent around
 the platform. Make sure that there are no errors displayed in the Unity editor
 Console window and that the Agent resets when it reaches its target or falls
 from the platform.
Note that for more involved debugging, the ML-Agents SDK @@ -495,96 +461,93 @@ environment. ## Training the Environment -Now you can train the Agent. To get ready for training, you must first drag the -`RollerBallBrain` asset to the **RollerAgent** GameObject `Brain` field to change to the learning brain. -Then, select the Academy GameObject and check the `Control` checkbox for -the RollerBallBrain item in the **Broadcast Hub** list. From there, the process is -the same as described in [Training ML-Agents](Training-ML-Agents.md). Note that the +The process is +the same as described in [Training ML-Agents](Training-ML-Agents.md). Note that the models will be created in the original ml-agents project folder, `ml-agents/models`. The hyperparameters for training are specified in the configuration file that you -pass to the `mlagents-learn` program. Using the default settings specified +pass to the `mlagents-learn` program. Using the default settings specified in the original `ml-agents/config/trainer_config.yaml` file, the -RollerAgent takes about 300,000 steps to train. However, you can change the +RollerAgent takes about 300,000 steps to train. However, you can change the following hyperparameters to speed up training considerably (to under 20,000 steps): batch_size: 10 buffer_size: 100 -Since this example creates a very simple training environment with only a few inputs -and outputs, using small batch and buffer sizes speeds up the training considerably. -However, if you add more complexity to the environment or change the reward or -observation functions, you might also find that training performs better with different +Since this example creates a very simple training environment with only a few inputs +and outputs, using small batch and buffer sizes speeds up the training considerably. 
+However, if you add more complexity to the environment or change the reward or +observation functions, you might also find that training performs better with different hyperparameter values. -**Note:** In addition to setting these hyperparameter values, the Agent +**Note:** In addition to setting these hyperparameter values, the Agent **DecisionFrequency** parameter has a large effect on training time and success. A larger value reduces the number of decisions the training algorithm has to consider and, -in this simple environment, speeds up training. +in this simple environment, speeds up training. -To train in the editor, run the following Python command from a Terminal or Console +To train in the editor, run the following Python command from a Terminal or Console window before pressing play: mlagents-learn config/config.yaml --run-id=RollerBall-1 --train -(where `config.yaml` is a copy of `trainer_config.yaml` that you have edited -to change the `batch_size` and `buffer_size` hyperparameters for your brain.) +(where `config.yaml` is a copy of `trainer_config.yaml` that you have edited +to change the `batch_size` and `buffer_size` hyperparameters for your trainer.) -**Note:** If you get a `command not found` error when running this command, make sure -that you have followed the *Install Python and mlagents Package* section of the +**Note:** If you get a `command not found` error when running this command, make sure +that you have followed the *Install Python and mlagents Package* section of the ML-Agents [Installation](Installation.md) instructions. -To monitor the statistics of Agent performance during training, use -[TensorBoard](Using-Tensorboard.md). +To monitor the statistics of Agent performance during training, use +[TensorBoard](Using-Tensorboard.md). ![TensorBoard statistics display](images/mlagents-RollerAgentStats.png) -In particular, the *cumulative_reward* and *value_estimate* statistics show how -well the Agent is achieving the task. 
In this example, the maximum reward an +In particular, the *cumulative_reward* and *value_estimate* statistics show how +well the Agent is achieving the task. In this example, the maximum reward an Agent can earn is 1.0, so these statistics approach that value when the Agent has successfully *solved* the problem. -**Note:** If you use TensorBoard, always increment or change the `run-id` -you pass to the `mlagents-learn` command for each training run. If you use -the same id value, the statistics for multiple runs are combined and become +**Note:** If you use TensorBoard, always increment or change the `run-id` +you pass to the `mlagents-learn` command for each training run. If you use +the same id value, the statistics for multiple runs are combined and become difficult to interpret. ## Optional: Multiple Training Areas within the Same Scene -In many of the [example environments](Learning-Environment-Examples.md), many copies of +In many of the [example environments](Learning-Environment-Examples.md), many copies of the training area are instantiated in the scene. This generally speeds up training, allowing the environment to gather many experiences in parallel. This can be achieved -simply by instantiating many Agents which share the same Brain. Use the following steps to -parallelize your RollerBall environment. +simply by instantiating many Agents which share the `Behavior Parameters`. Use the following steps to +parallelize your RollerBall environment. ### Instantiating Multiple Training Areas -1. Right-click on your Project Hierarchy and create a new empty GameObject. - Name it TrainingArea. -2. Reset the TrainingArea’s Transform so that it is at (0,0,0) with Rotation (0,0,0) - and Scale (1,1,1). -3. Drag the Floor, Target, and RollerAgent GameObjects in the Hierarchy into the - TrainingArea GameObject. -4. Drag the TrainingArea GameObject, along with its attached GameObjects, into your +1. 
Right-click on your Project Hierarchy and create a new empty GameObject. + Name it TrainingArea. +2. Reset the TrainingArea’s Transform so that it is at (0,0,0) with Rotation (0,0,0) + and Scale (1,1,1). +3. Drag the Floor, Target, and RollerAgent GameObjects in the Hierarchy into the + TrainingArea GameObject. +4. Drag the TrainingArea GameObject, along with its attached GameObjects, into your Assets browser, turning it into a prefab. -5. You can now instantiate copies of the TrainingArea prefab. Drag them into your scene, - positioning them so that they do not overlap. +5. You can now instantiate copies of the TrainingArea prefab. Drag them into your scene, + positioning them so that they do not overlap. -### Editing the Scripts +### Editing the Scripts -You will notice that in the previous section, we wrote our scripts assuming that our -TrainingArea was at (0,0,0), performing checks such as `this.transform.position.y < 0` -to determine whether our agent has fallen off the platform. We will need to change -this if we are to use multiple TrainingAreas throughout the scene. +You will notice that in the previous section, we wrote our scripts assuming that our +TrainingArea was at (0,0,0), performing checks such as `this.transform.position.y < 0` +to determine whether our agent has fallen off the platform. We will need to change +this if we are to use multiple TrainingAreas throughout the scene. -A quick way to adapt our current code is to use -localPosition rather than position, so that our position reference is in reference -to the prefab TrainingArea's location, and not global coordinates. +A quick way to adapt our current code is to use +localPosition rather than position, so that our position reference is in reference +to the prefab TrainingArea's location, and not global coordinates. 1. Replace all references of `this.transform.position` in RollerAgent.cs with `this.transform.localPosition`. 2. 
Replace all references of `Target.position` in RollerAgent.cs with `Target.localPosition`. -This is only one way to achieve this objective. Refer to the +This is only one way to achieve this objective. Refer to the [example environments](Learning-Environment-Examples.md) for other ways we can achieve relative positioning. ## Review: Scene Layout @@ -593,11 +556,12 @@ This section briefly reviews how to organize your scene when using Agents in your Unity environment. There are two kinds of game objects you need to include in your scene in order -to use Unity ML-Agents: an Academy and one or more Agents. You also need to -have brain assets linked appropriately to your Agents and to the Academy. +to use Unity ML-Agents: an Academy and one or more Agents. Keep in mind: * There can only be one Academy game object in a scene. -* You can only train Learning Brains that have been added to the Academy's Broadcast Hub list. +* If you are using multiple training areas, make sure all the Agents have the same `Behavior Name` +and `Behavior Parameters` + diff --git a/docs/Learning-Environment-Design-Academy.md b/docs/Learning-Environment-Design-Academy.md index 954a9463d4..0d324c127a 100644 --- a/docs/Learning-Environment-Design-Academy.md +++ b/docs/Learning-Environment-Design-Academy.md @@ -1,6 +1,6 @@ # Creating an Academy -An Academy orchestrates all the Agent and Brain objects in a Unity scene. Every +An Academy orchestrates all the Agent objects in a Unity scene. Every scene containing Agents must contain a single Academy. To use an Academy, you must create your own subclass. However, all the methods you can override are optional. @@ -50,10 +50,6 @@ logic for creating them in the `AcademyStep()` function. ## Academy Properties ![Academy Inspector](images/academy.png) -* `Broadcast Hub` - Gathers the Brains that will communicate with the external - process. Any Brain added to the Broadcast Hub will be visible from the external - process. 
In addition, if the checkbox `Control` is checked, the Brain will be - controllable from the external process and will thus be trainable. * `Configuration` - The engine-level settings which correspond to rendering quality and engine speed. * `Width` - Width of the environment window in pixels. diff --git a/docs/Learning-Environment-Design-Agents.md b/docs/Learning-Environment-Design-Agents.md index e83c62642c..8e6f42a539 100644 --- a/docs/Learning-Environment-Design-Agents.md +++ b/docs/Learning-Environment-Design-Agents.md @@ -7,23 +7,21 @@ successfully learn are the observations the agent collects for reinforcement learning and the reward you assign to estimate the value of the agent's current state toward accomplishing its tasks. -An Agent passes its observations to its Brain. The Brain, then, makes a decision +An Agent passes its observations to its Policy. The Policy, then, makes a decision and passes the chosen action back to the agent. Your agent code must execute the action, for example, move the agent in one direction or another. In order to [train an agent using reinforcement learning](Learning-Environment-Design.md), your agent must calculate a reward value at each action. The reward is used to -discover the optimal decision-making policy. (A reward is not used by already -trained agents or for imitation learning.) - -The Brain class abstracts out the decision making logic from the Agent itself so -that you can use the same Brain in multiple Agents. How a Brain makes its -decisions depends on the kind of Brain it is. A Player Brain allows you -to directly control the agent. A Heuristic Brain allows you to create a -decision script to control the agent with a set of rules. These two Brains -do not involve neural networks but they can be useful for debugging. The -Learning Brain allows you to train and use neural network models for -your Agents. See [Brains](Learning-Environment-Design-Brains.md). - +discover the optimal decision-making policy. 
+
+The Policy class abstracts out the decision making logic from the Agent itself so
+that you can use the same Policy in multiple Agents. How a Policy makes its
+decisions depends on the kind of Policy it is. You can change the Policy of an
+Agent by changing its `Behavior Parameters`. If you check `Use Heuristic`, the
+Agent will use its `Heuristic()` method to make decisions which can allow you to
+control the Agent manually or write your own Policy. If the Agent has a `Model`
+file, its Policy will use the neural network `Model` to make decisions.
+
 ## Decisions
 
 The observation-decision-action-reward cycle repeats after a configurable number
@@ -35,17 +33,17 @@ respond to specific events or take actions of variable duration. For example, an
 agent in a robotic simulator that must provide fine-control of joint torques
 should make its decisions every step of the simulation. On the other hand, an
 agent that only needs to make decisions when certain game or simulation events
-occur, should use on-demand decision making. 
+occur, should use on-demand decision making.
 
 To control the frequency of step-based decision making, set the **Decision
 Frequency** value for the Agent object in the Unity Inspector window. Agents
-using the same Brain instance can use a different frequency. During simulation
+using the same Model can use a different frequency. During simulation
 steps in which no decision is requested, the Agent receives the same action
 chosen by the previous decision.
 
 ### On Demand Decision Making
 
-On demand decision making allows Agents to request decisions from their Brains
+On demand decision making allows Agents to request decisions from their Policies
 only when needed instead of receiving decisions at a fixed frequency. This is
 useful when the agents commit to an action for a variable number of steps or
 when the agents cannot make decisions at the same time. This typically the case
@@ -54,9 +52,10 @@ agents can take actions of variable duration. 
When you turn on **On Demand Decisions** for an Agent, your agent code must call the `Agent.RequestDecision()` function. This function call starts one iteration -of the observation-decision-action-reward cycle. The Brain invokes the Agent's -`CollectObservations()` method, makes a decision and returns it by calling the -`AgentAction()` method. The Brain waits for the Agent to request the next +of the observation-decision-action-reward cycle. The Agent's +`CollectObservations()` method is called, the Policy makes a decision and +returns it by calling the +`AgentAction()` method. The Policy waits for the Agent to request the next decision before starting another iteration. ## Observations @@ -70,16 +69,16 @@ state of the world. A state observation can take the following forms: When you use vector observations for an Agent, implement the `Agent.CollectObservations()` method to create the feature vector. When you use -**Visual Observations**, you only need to identify which Unity Camera objects -or RenderTextures will provide images and the base Agent class handles the rest. -You do not need to implement the `CollectObservations()` method when your Agent +**Visual Observations**, you only need to identify which Unity Camera objects +or RenderTextures will provide images and the base Agent class handles the rest. +You do not need to implement the `CollectObservations()` method when your Agent uses visual observations (unless it also uses vector observations). ### Vector Observation Space: Feature Vectors For agents using a continuous state space, you create a feature vector to -represent the agent's observation at each step of the simulation. The Brain -class calls the `CollectObservations()` method of each of its Agents. Your +represent the agent's observation at each step of the simulation. The Policy +class calls the `CollectObservations()` method of each Agent. Your implementation of this function must call `AddVectorObs` to add vector observations. 
@@ -122,7 +121,7 @@ with zeros for any missing entities in a specific observation or you can limit an agent's observations to a fixed subset. For example, instead of observing every enemy agent in an environment, you could only observe the closest five. -When you set up an Agent's Brain in the Unity Editor, set the following +When you set up an Agent's `Behavior Parameters` in the Unity Editor, set the following properties to use a continuous vector observation: * **Space Size** — The state size must match the length of your feature vector. @@ -200,23 +199,24 @@ used in your normalization formula. ### Multiple Visual Observations -Visual observations use rendered textures directly or from one or more -cameras in a scene. The Brain vectorizes the textures into a 3D Tensor which -can be fed into a convolutional neural network (CNN). For more information on -CNNs, see [this guide](http://cs231n.github.io/convolutional-networks/). You +Visual observations use rendered textures directly or from one or more +cameras in a scene. The Policy vectorizes the textures into a 3D Tensor which +can be fed into a convolutional neural network (CNN). For more information on +CNNs, see [this guide](http://cs231n.github.io/convolutional-networks/). You can use visual observations along side vector observations. -Agents using visual observations can capture state of arbitrary complexity and -are useful when the state is difficult to describe numerically. However, they -are also typically less efficient and slower to train, and sometimes don't +Agents using visual observations can capture state of arbitrary complexity and +are useful when the state is difficult to describe numerically. However, they +are also typically less efficient and slower to train, and sometimes don't succeed at all. -Visual observations can be derived from Cameras or RenderTextures within your scene. 
-To add a visual observation to an Agent, either click on the `Add Camera` or -`Add RenderTexture` button in the Agent inspector. Then drag the camera or -render texture you want to add to the `Camera` or `RenderTexture` field. -You can have more than one camera or render texture and even use a combination -of both attached to an Agent. +Visual observations can be derived from Cameras or RenderTextures within your scene. +To add a visual observation to an Agent, add either a Camera Sensor Component +or RenderTextures Sensor Component to the Agent. Then drag the camera or +render texture you want to add to the `Camera` or `RenderTexture` field. +You can have more than one camera or render texture and even use a combination +of both attached to an Agent. For each visual observation, set the width and height +of the image (in pixels) and whether or not the observation is color or grayscale. ![Agent Camera](images/visual-observation.png) @@ -224,42 +224,30 @@ or ![Agent RenderTexture](images/visual-observation-rendertexture.png) -In addition, make sure that the Agent's Brain expects a visual observation. In -the Brain inspector, under **Brain Parameters** > **Visual Observations**, -specify the number of Resolutions the Agent is using for its visual observations. -For each visual observation, set the width and height of the image (in pixels) -and whether or not the observation is color or grayscale (when `Black And White` -is checked). - -For instance, if you are using two cameras and one render texture on your Agent, -three **Visual Observations** have to be added to the **Brain Parameters**. -During runtime, if a combination of `Cameras` and `RenderTextures` is used, all -cameras are captured first, then all render textures will be added, in the -order they appear in the editor. 
- -![Agent Camera and RenderTexture combination](images/visual-observation-combination.png) +Each Agent that uses the same Policy must have the same number of visual observations, +and they must all have the same resolutions (including whether or not they are grayscale). +Additionally, each Sensor Component on an Agent must have a unique name so that they can +be sorted deterministically (the name must be unique for that Agent, but multiple Agents can +have a Sensor Component with the same name). -RenderTexture observations will throw an `Exception` if the width/height doesn't -match the resolution specified under **Brain Parameters** > **Visual Observations**. - -When using `RenderTexture` visual observations, a handy feature for debugging is -adding a `Canvas`, then adding a `Raw Image` with it's texture set to the Agent's -`RenderTexture`. This will render the agent observation on the game screen. +When using `RenderTexture` visual observations, a handy feature for debugging is +adding a `Canvas`, then adding a `Raw Image` with it's texture set to the Agent's +`RenderTexture`. This will render the agent observation on the game screen. ![RenderTexture with Raw Image](images/visual-observation-rawimage.png) -The [GridWorld environment](Learning-Environment-Examples.md#gridworld) -is an example on how to use a RenderTexure for both debugging and observation. Note -that in this example, a Camera is rendered to a RenderTexture, which is then used for -observations and debugging. To update the RenderTexture, the Camera must be asked to -render every time a decision is requested within the game code. When using Cameras +The [GridWorld environment](Learning-Environment-Examples.md#gridworld) +is an example on how to use a RenderTexture for both debugging and observation. Note +that in this example, a Camera is rendered to a RenderTexture, which is then used for +observations and debugging. 
To update the RenderTexture, the Camera must be asked to +render every time a decision is requested within the game code. When using Cameras as observations directly, this is done automatically by the Agent. ![Agent RenderTexture Debug](images/gridworld.png) ## Vector Actions -An action is an instruction from the Brain that the agent carries out. The +An action is an instruction from the Policy that the agent carries out. The action is passed to the Agent as a parameter when the Academy invokes the agent's `AgentAction()` function. When you specify that the vector action space is **Continuous**, the action parameter passed to the Agent is an array of @@ -270,12 +258,9 @@ of commands. In the **Discrete** vector action space type, the action parameter is an array of indices. The number of indices in the array is determined by the number of branches defined in the `Branches Size` property. Each branch corresponds to an action table, you can specify the size of each table by -modifying the `Branches` property. The `Branch Descriptions` property holds the names -for each available branch. Set the `Vector Action Space Size` and -`Vector Action Space Type` properties on the Brain object assigned to the Agent -(using the Unity Editor Inspector window). +modifying the `Branches` property. -Neither the Brain nor the training algorithm know anything about what the action +Neither the Policy nor the training algorithm know anything about what the action values themselves mean. The training algorithm simply tries different values for the action list and observes the affect on the accumulated rewards over time and many training episodes. Thus, the only place actions are defined for an Agent is @@ -287,17 +272,18 @@ then apply the received values appropriately (and consistently) in For example, if you designed an agent to move in two dimensions, you could use either continuous or the discrete vector actions. 
In the continuous case, you would set the vector action size to two (one for each dimension), and the -agent's Brain would create an action with two floating point values. In the +agent's Policy would create an action with two floating point values. In the discrete case, you would use one Branch with a size of four (one for each -direction), and the Brain would create an action array containing a single +direction), and the Policy would create an action array containing a single element with a value ranging from zero to three. Alternatively, you could create two branches of size two (one for horizontal movement and one for vertical -movement), and the Brain would create an action array containing two elements +movement), and the Policy would create an action array containing two elements with values ranging from zero to one. Note that when you are programming actions for an agent, it is often helpful to -test your action logic using a **Player** Brain, which lets you map keyboard -commands to actions. See [Brains](Learning-Environment-Design-Brains.md). +test your action logic using the `Heuristic()` method of the Agent, +which lets you map keyboard +commands to actions. The [3DBall](Learning-Environment-Examples.md#3dball-3d-balance-ball) and [Area](Learning-Environment-Examples.md#push-block) example environments are set @@ -305,9 +291,9 @@ up to use either the continuous or the discrete vector action spaces. ### Continuous Action Space -When an Agent uses a Brain set to the **Continuous** vector action space, the +When an Agent uses a Policy set to the **Continuous** vector action space, the action parameter passed to the Agent's `AgentAction()` function is an array with -length equal to the Brain object's `Vector Action Space Size` property value. +length equal to the `Vector Action Space Size` property value. The individual values in the array have whatever meanings that you ascribe to them. 
If you assign an element in the array as the speed of an Agent, for example, the training process learns to control the speed of the Agent through @@ -340,7 +326,7 @@ As shown above, you can scale the control values as needed after clamping them. ### Discrete Action Space -When an Agent uses a Brain set to the **Discrete** vector action space, the +When an Agent uses a **Discrete** vector action space, the action parameter passed to the Agent's `AgentAction()` function is an array containing indices. With the discrete vector action space, `Branches` is an array of integers, each value corresponds to the number of possibilities for @@ -380,9 +366,9 @@ continuous action spaces. #### Masking Discrete Actions When using Discrete Actions, it is possible to specify that some actions are -impossible for the next decision. Then the Agent is controlled by a -Learning Brain, the Agent will be unable to perform the specified action. Note -that when the Agent is controlled by a Player or Heuristic Brain, the Agent will +impossible for the next decision. When the Agent is controlled by a +neural network, the Agent will be unable to perform the specified action. Note +that when the Agent is controlled by its Heuristic, the Agent will still be able to decide to perform the masked action. In order to mask an action, call the method `SetActionMask` within the `CollectObservation` method : @@ -422,14 +408,14 @@ the choices an agent makes such that the agent earns the highest cumulative reward over time. The better your reward mechanism, the better your agent will learn. -**Note:** Rewards are not used during inference by a Brain using an already -trained policy and is also not used during imitation learning. +**Note:** Rewards are not used during inference by an Agent using a +trained model and are also not used during imitation learning. Perhaps the best advice is to start simple and only add complexity as needed. 
In general, you should reward results rather than actions you think will lead to the desired results. To help develop your rewards, you can use the Monitor class -to display the cumulative reward received by an Agent. You can even use a Player -Brain to control the Agent while watching how it accumulates rewards. +to display the cumulative reward received by an Agent. You can even use the +Agent's Heuristic to control the Agent while watching how it accumulates rewards. Allocate rewards to an Agent by calling the `AddReward()` method in the `AgentAction()` function. The reward assigned between each decision @@ -517,17 +503,33 @@ The `Ball3DAgent` also assigns a negative penalty when the ball falls off the platform. Note that all of these environments make use of the `Done()` method, which manually -terminates an episode when a termination condition is reached. This can be +terminates an episode when a termination condition is reached. This can be called independently of the `Max Step` property. ## Agent Properties -![Agent Inspector](images/agent.png) - -* `Brain` - The Brain to register this Agent to. Can be dragged into the - inspector using the Editor. -* `Visual Observations` - A list of `Cameras` or `RenderTextures` which will - be used to generate observations. +![Agent Inspector](images/3dball_learning_brain.png) + +* `Behavior Parameters` - The parameters dictating what Policy the Agent will +receive. + * `Vector Observation` + * `Space Size` - Length of vector observation for the Agent. + * `Stacked Vectors` - The number of previous vector observations that will + be stacked and used collectively for decision making. This results in the + effective size of the vector observation being passed to the Policy being: + _Space Size_ x _Stacked Vectors_. + * `Vector Action` + * `Space Type` - Corresponds to whether action vector contains a single + integer (Discrete) or a series of real-valued floats (Continuous). 
+ * `Space Size` (Continuous) - Length of action vector. + * `Branches` (Discrete) - An array of integers, defines multiple concurrent + discrete actions. The values in the `Branches` array correspond to the + number of possible discrete values for each action branch. + * `Model` - The neural network model used for inference (obtained after + training) + * `Inference Device` - Whether to use CPU or GPU to run the model during inference + * `Use Heuristic` - If checked, the Agent will use its 'Heuristic()' method for + decisions. * `Max Step` - The per-agent maximum number of steps. Once this number is reached, the Agent will be reset if `Reset On Done` is checked. * `Reset On Done` - Whether the Agent's `AgentReset()` function should be called @@ -538,11 +540,11 @@ called independently of the `Max Step` property. Frequency` steps and perform an action every step. In the example above, `CollectObservations()` will be called every 5 steps and `AgentAction()` will be called at every step. This means that the Agent will reuse the - decision the Brain has given it. + decision the Policy has given it. * If checked, the Agent controls when to receive decisions, and take actions. To do so, the Agent may leverage one or two methods: * `RequestDecision()` Signals that the Agent is requesting a decision. This - causes the Agent to collect its observations and ask the Brain for a + causes the Agent to collect its observations and ask the Policy for a decision at the next step of the simulation. Note that when an Agent requests a decision, it also request an action. This is to ensure that all decisions lead to an action during training. @@ -559,31 +561,10 @@ Unity environment. While this was built for monitoring an agent's value function throughout the training process, we imagine it can be more broadly useful. You can learn more [here](Feature-Monitor.md). 
-## Instantiating an Agent at Runtime - -To add an Agent to an environment at runtime, use the Unity -`GameObject.Instantiate()` function. It is typically easiest to instantiate an -agent from a [Prefab](https://docs.unity3d.com/Manual/Prefabs.html) (otherwise, -you have to instantiate every GameObject and Component that make up your Agent -individually). In addition, you must assign a Brain instance to the new Agent -and initialize it by calling its `AgentReset()` method. For example, the -following function creates a new Agent given a Prefab, Brain instance, location, -and orientation: - -```csharp -private void CreateAgent(GameObject AgentPrefab, Brain brain, Vector3 position, Quaternion orientation) -{ - GameObject AgentObj = Instantiate(agentPrefab, position, orientation); - Agent Agent = AgentObj.GetComponent(); - Agent.GiveBrain(brain); - Agent.AgentReset(); -} -``` - ## Destroying an Agent Before destroying an Agent GameObject, you must mark it as done (and wait for -the next step in the simulation) so that the Brain knows that this Agent is no +the next step in the simulation) so that the Policy knows that this Agent is no longer active. Thus, the best place to destroy an Agent is in the `Agent.AgentOnDone()` function: diff --git a/docs/Learning-Environment-Design-Brains.md b/docs/Learning-Environment-Design-Brains.md deleted file mode 100644 index 60bb1ae1bf..0000000000 --- a/docs/Learning-Environment-Design-Brains.md +++ /dev/null @@ -1,96 +0,0 @@ -# Brains - -The Brain encapsulates the decision making process. Every Agent must be -assigned a Brain, but you can use the same Brain with more than one Agent. You -can also create several Brains, attach each of the Brain to one or more than one -Agent. - -There are 3 kinds of Brains you can use: - -* [Learning](Learning-Environment-Design-Learning-Brains.md) – Use a - **LearningBrain** to make use of a trained model or train a new model. 
-* [Heuristic](Learning-Environment-Design-Heuristic-Brains.md) – Use a - **HeuristicBrain** to hand-code the Agent's logic by extending the Decision class. -* [Player](Learning-Environment-Design-Player-Brains.md) – Use a - **PlayerBrain** to map keyboard keys to Agent actions, which can be - useful to test your Agent code. - -During training, use a **Learning Brain** -and drag it into the Academy's `Broadcast Hub` with the `Control` checkbox checked. -When you want to use the trained model, import the model file into the Unity -project, add it to the **Model** property of the **Learning Brain** and uncheck -the `Control` checkbox of the `Broadcast Hub`. - -Brain assets has several important properties that you can set using the -Inspector window. These properties must be appropriate for the Agents using the -Brain. For example, the `Vector Observation Space Size` property must match the -length of the feature vector created by an Agent exactly. See -[Agents](Learning-Environment-Design-Agents.md) for information about creating -agents and setting up a Brain instance correctly. - -## Brain Properties - -The Brain Inspector window in the Unity Editor displays the properties assigned -to a Brain component: - -![Brain Inspector](images/brain.png) - -* `Brain Parameters` - Define vector observations, visual observation, and - vector actions for the Brain. - * `Vector Observation` - * `Space Size` - Length of vector observation for Brain. - * `Stacked Vectors` - The number of previous vector observations that will - be stacked and used collectively for decision making. This results in the - effective size of the vector observation being passed to the Brain being: - _Space Size_ x _Stacked Vectors_. - * `Visual Observations` - Describes height, width, and whether to grayscale - visual observations for the Brain. 
- * `Vector Action` - * `Space Type` - Corresponds to whether action vector contains a single - integer (Discrete) or a series of real-valued floats (Continuous). - * `Space Size` (Continuous) - Length of action vector for Brain. - * `Branches` (Discrete) - An array of integers, defines multiple concurrent - discrete actions. The values in the `Branches` array correspond to the - number of possible discrete values for each action branch. - * `Action Descriptions` - A list of strings used to name the available - actions for the Brain. - -The other properties of the Brain depend on the type of Brain you are using. - -## Using the Broadcast Feature - -The Player, Heuristic and Learning Brains can support -broadcast to an external process. The broadcast feature allows you to collect data -from your Agents using a Python program without controlling them. - -### How to use: Unity - -To turn it on in Unity, drag the Brain into the Academy's Broadcast Hub but leave -the `Control` checkbox unchecked when present. This will expose the Brain's data -without letting the external process control it. - -![Broadcast](images/broadcast.png) - -### How to use: Python - -When you launch your Unity Environment from a Python program, you can see what -the Agents connected to Brains present in the `Broadcast Hub` are doing. -When calling `step` or -`reset` on your environment, you retrieve a dictionary mapping Brain names to -`BrainInfo` objects. The dictionary contains a `BrainInfo` object for each -Brain in the `Broadcast Hub`. - -Just like with a Learning Brain, the `BrainInfo` object contains the fields for -`visual_observations`, `vector_observations`, `text_observations`, -`memories`,`rewards`, `local_done`, `max_reached`, `agents` and -`previous_actions`. Note that `previous_actions` corresponds to the actions that -were taken by the Agents at the previous step, not the current one. 
- -Note that when you do a `step` on the environment, you can only provide actions -for the Brains in the `Broadcast Hub` with the `Control` checkbox checked. If there -are Brains in the `Broadcast Hub` with the -`Control` checkbox checked, simply call `step()` with no arguments. - -You can use the broadcast feature to collect data generated by Player, -Heuristics or Learning Brains game sessions. You can then use this data to train -an agent in a supervised context. diff --git a/docs/Learning-Environment-Design-Heuristic-Brains.md b/docs/Learning-Environment-Design-Heuristic-Brains.md deleted file mode 100644 index e089ae752c..0000000000 --- a/docs/Learning-Environment-Design-Heuristic-Brains.md +++ /dev/null @@ -1,41 +0,0 @@ -# Heuristic Brain - -The **Heuristic Brain** allows you to hand code an Agent's decision making -process. A Heuristic Brain requires an implementation of the Decision script -to which it delegates the decision making process. - -When you use a **Heuristic Brain**, you must add a decision script to the `Decision` -property of the **Heuristic Brain**. - -## Implementing the Decision interface - -```csharp -using UnityEngine; -using MLAgents; - -public class HeuristicLogic : Decision -{ - // ... -} -``` - -The Decision interface defines two methods, `Decide()` and `MakeMemory()`. - -The `Decide()` method receives an Agents current state, consisting of the -agent's observations, reward, memory and other aspects of the Agent's state, and -must return an array containing the action that the Agent should take. The -format of the returned action array depends on the **Vector Action Space Type**. -When using a **Continuous** action space, the action array is just a float array -with a length equal to the **Vector Action Space Size** setting. When using a -**Discrete** action space, the action array is an integer array with the same -size as the `Branches` array. 
In the discrete action space, the values of the -**Branches** array define the number of discrete values that your `Decide()` -function can return for each branch, which don't need to be consecutive -integers. - -The `MakeMemory()` function allows you to pass data forward to the next -iteration of an Agent's decision making process. The array you return from -`MakeMemory()` is passed to the `Decide()` function in the next iteration. You -can use the memory to allow the Agent's decision process to take past actions -and observations into account when making the current decision. If your -heuristic logic does not require memory, just return an empty array. diff --git a/docs/Learning-Environment-Design-Learning-Brains.md b/docs/Learning-Environment-Design-Learning-Brains.md deleted file mode 100644 index d2fbb87a51..0000000000 --- a/docs/Learning-Environment-Design-Learning-Brains.md +++ /dev/null @@ -1,71 +0,0 @@ -# Learning Brains - -The **Learning Brain** works differently if you are training it or not. -When training your Agents, drag the **Learning Brain** to the -Academy's `Broadcast Hub` and check the checkbox `Control`. When using a pre-trained -model, just drag the Model file into the `Model` property of the **Learning Brain**. - -## Training Mode / External Control - -When [running an ML-Agents training algorithm](Training-ML-Agents.md), at least -one Brain asset must be in the Academy's `Broadcast Hub` with the checkbox `Control` -checked. This allows the training process to collect the observations of Agents -using that Brain and give the Agents their actions. - -In addition to using a **Learning Brain** for training using the ML-Agents learning -algorithms, you can use a **Learning Brain** to control Agents in a Unity -environment using an external Python program. See [Python API](Python-API.md) -for more information. 
- -## Inference Mode / Internal Control - -When not training, the **Learning Brain** uses a -[TensorFlow model](https://www.tensorflow.org/get_started/get_started_for_beginners#models_and_training) -to make decisions. The Proximal Policy Optimization (PPO) and Behavioral Cloning -algorithms included with the ML-Agents SDK produce trained TensorFlow models -that you can use with the Learning Brain type. - -A __model__ is a mathematical relationship mapping an agent's observations to -its actions. TensorFlow is a software library for performing numerical -computation through data flow graphs. A TensorFlow model, then, defines the -mathematical relationship between your Agent's observations and its actions -using a TensorFlow data flow graph. - -### Creating a graph model - -The training algorithms included in the ML-Agents SDK produce TensorFlow graph -models as the end result of the training process. See -[Training ML-Agents](Training-ML-Agents.md) for instructions on how to train a -model. - -### Using a graph model - -To use a graph model: - -1. Select the **Learning Brain** asset in the **Project** window of the Unity Editor. -2. Import the `model_name` file produced by the PPO training - program. (Where `model_name` is the name of the model file, which is - constructed from the name of your Unity environment executable and the run-id - value you assigned when running the training process.) - - You can - [import assets into Unity](https://docs.unity3d.com/Manual/ImportingAssets.html) - in various ways. The easiest way is to simply drag the file into the - **Project** window and drop it into an appropriate folder. -3. Once the `model_name.nn` file is imported, drag it from the **Project** - window to the **Model** field of the Brain component. - -If you are using a model produced by the ML-Agents `mlagents-learn` command, use -the default values for the other Learning Brain parameters. 
- -### Learning Brain properties - -The default values of the TensorFlow graph parameters work with the model -produced by the PPO and BC training code in the ML-Agents SDK. To use a default -ML-Agents model, the only parameter that you need to set is the `Model`, -which must be set to the `.nn` file containing the trained model itself. - -* `Model` : This must be the `.nn` file corresponding to the pre-trained - TensorFlow graph. (You must first drag this file into your Project window - and then from the Resources folder into the inspector) - diff --git a/docs/Learning-Environment-Design-Player-Brains.md b/docs/Learning-Environment-Design-Player-Brains.md deleted file mode 100644 index b69349b0fb..0000000000 --- a/docs/Learning-Environment-Design-Player-Brains.md +++ /dev/null @@ -1,37 +0,0 @@ -# Player Brain - -The **Player Brain** allows you to control an Agent using keyboard -commands. You can use Player Brains to control a "teacher" Agent that trains -other Agents during [imitation learning](Training-Imitation-Learning.md). You -can also use Player Brains to test your Agents and environment before replacing them by **Learning Brains** and running the training process. - -## Player Brain properties - -The **Player Brain** properties allow you to assign one or more keyboard keys to -each action and a unique value to send when a key is pressed. - -![Player Brain Inspector](images/player_brain.png) - -Note the differences between the discrete and continuous action spaces. When a -Brain uses the discrete action space, you can send one integer value as the -action per step. In contrast, when a Brain uses the continuous action space you -can send any number of floating point values (up to the **Vector Action Space -Size** setting). 
- -| **Property** | | **Description** | -| :---------------------------- | :--------------- | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| **Continuous Player Actions** | | The mapping for the continuous vector action space. Shown when the action space is **Continuous**. | -| | **Size** | The number of key commands defined. You can assign more than one command to the same action index in order to send different values for that action. (If you press both keys at the same time, deterministic results are not guaranteed.) | -| | **Element 0–N** | The mapping of keys to action values. | -| | **Key** | The key on the keyboard. | -| | **Index** | The element of the Agent's action vector to set when this key is pressed. The index value cannot exceed the size of the Action Space (minus 1, since it is an array index). | -| | **Value** | The value to send to the Agent as its action for the specified index when the mapped key is pressed. All other members of the action vector are set to 0. | -| **Discrete Player Actions** | | The mapping for the discrete vector action space. Shown when the action space is **Discrete**. | -| | **Size** | The number of key commands defined. | -| | **Element 0–N** | The mapping of keys to action values. | -| | **Key** | The key on the keyboard. | -| | **Branch Index** | The element of the Agent's action vector to set when this key is pressed. The index value cannot exceed the size of the Action Space (minus 1, since it is an array index). | -| | **Value** | The value to send to the Agent as its action when the mapped key is pressed. Cannot exceed the max value for the associated branch (minus 1, since it is an array index). Note that if no key is pressed for that branch, the default action will be 0. 
| - -For more information about the Unity input system, see -[Input](https://docs.unity3d.com/ScriptReference/Input.html). diff --git a/docs/Learning-Environment-Design.md b/docs/Learning-Environment-Design.md index d1bf144d9a..91863cc596 100644 --- a/docs/Learning-Environment-Design.md +++ b/docs/Learning-Environment-Design.md @@ -33,9 +33,7 @@ one training episode is finished. During training, the external Python training process communicates with the Academy to run a series of episodes while it collects data and optimizes its -neural network model. The kind of Brain assigned to an Agent determines whether -it participates in training or not. The **Learning Brain** can be used to train -or execute a TensorFlow model. When training is completed +neural network model. When training is completed successfully, you can add the trained model file to your Unity project for later use. @@ -44,10 +42,10 @@ The ML-Agents Academy class orchestrates the agent simulation loop as follows: 1. Calls your Academy subclass's `AcademyReset()` function. 2. Calls the `AgentReset()` function for each Agent in the scene. 3. Calls the `CollectObservations()` function for each Agent in the scene. -4. Uses each Agent's Brain to decide on the Agent's next action. +4. Uses each Agent's Policy to decide on the Agent's next action. 5. Calls your subclass's `AcademyStep()` function. 6. Calls the `AgentAction()` function for each Agent in the scene, passing in - the action chosen by the Agent's Brain. (This function is not called if the + the action chosen by the Agent's Policy. (This function is not called if the Agent is done.) 7. Calls the Agent's `AgentOnDone()` function if the Agent has reached its `Max Step` count or has otherwise marked itself as `done`. Optionally, you can set @@ -69,15 +67,9 @@ information. To train and use the ML-Agents toolkit in a Unity scene, the scene must contain a single Academy subclass and as many Agent subclasses -as you need. 
The Brain assets are present in the project and should be grouped -together and named according to the type of agents they are compatible with. +as you need. Agent instances should be attached to the GameObject representing that Agent. -You must assign a Brain to every Agent, but you can share Brains between -multiple Agents. Each Agent will make its own observations and act -independently, but will use the same decision-making logic and, for **Learning -Brains**, the same trained TensorFlow model. - ### Academy The Academy object orchestrates Agents and their decision making processes. Only @@ -101,59 +93,31 @@ following methods (all are optional): See [Academy](Learning-Environment-Design-Academy.md) for a complete list of the Academy properties and their uses. -### Brain - -The Brain encapsulates the decision making process. Every Agent must be -assigned a Brain, but you can use the same Brain with more than one Agent. -__Note__:You can assign the same Brain to multiple agents by using prefabs -or by selecting all the agents you want to attach the Brain to using the -search bar on top of the Scene Hierarchy window. - -To Create a Brain, go to `Assets -> Create -> Ml-Agents` and select the -type of Brain you want to use. During training, use a **Learning Brain** -and drag it into the Academy's `Broadcast Hub` with the `Control` checkbox checked. -When you want to use the trained model, import the model file into the Unity -project, add it to the **Model** property of the **Learning Brain** and uncheck -the `Control` checkbox of the `Broadcast Hub`. See -[Brains](Learning-Environment-Design-Brains.md) for details on using the -different types of Brains. You can create new kinds of Brains if the three -built-in don't do what you need. - -The Brain class has several important properties that you can set using the -Inspector window. These properties must be appropriate for the Agents using the -Brain. 
For example, the `Vector Observation Space Size` property must match the -length of the feature vector created by an Agent exactly. See -[Agents](Learning-Environment-Design-Agents.md) for information about creating -agents and setting up a Brain instance correctly. - -See [Brains](Learning-Environment-Design-Brains.md) for a complete list of the -Brain properties. - ### Agent The Agent class represents an actor in the scene that collects observations and carries out actions. The Agent class is typically attached to the GameObject in the scene that otherwise represents the actor — for example, to a player object -in a football game or a car object in a vehicle simulation. Every Agent must be -assigned a Brain. +in a football game or a car object in a vehicle simulation. Every Agent must +have appropriate `Behavior Parameters`. To create an Agent, extend the Agent class and implement the essential `CollectObservations()` and `AgentAction()` methods: * `CollectObservations()` — Collects the Agent's observation of its environment. -* `AgentAction()` — Carries out the action chosen by the Agent's Brain and +* `AgentAction()` — Carries out the action chosen by the Agent's Policy and assigns a reward to the current state. -Your implementations of these functions determine how the properties of the -Brain assigned to this Agent must be set. +Your implementations of these functions determine how the Behavior Parameters +assigned to this Agent must be set. You must also determine how an Agent finishes its task or times out. You can manually set an Agent to done in your `AgentAction()` function when the Agent -has finished (or irrevocably failed) its task by calling the `Done()` function. -You can also set the Agent's `Max Steps` property to a positive value and the -Agent will consider itself done after it has taken that many steps. If you -set an Agent's `ResetOnDone` property to true, then the Agent can attempt its -task several times in one episode. 
(Use the `Agent.AgentReset()` function to +has finished (or irrevocably failed) its task by calling the `Done()` function. +You can also set the Agent's `Max Steps` property to a positive value and the +Agent will consider itself done after it has taken that many steps. If you +set an Agent's `ResetOnDone` property to true, then the Agent can attempt its +task several times in one episode. (Use the `Agent.AgentReset()` function to prepare the Agent to start again.) See [Agents](Learning-Environment-Design-Agents.md) for detailed information @@ -175,15 +139,13 @@ to control the agent decision making process. The Academy defines several properties that can be set differently for a training scene versus a regular scene. The Academy's **Configuration** properties control rendering and time scale. You can set the **Training Configuration** to minimize the time Unity -spends rendering graphics in order to speed up training. +spends rendering graphics in order to speed up training. When you create a training environment in Unity, you must set up the scene so that it can be controlled by the external training process. Considerations include: * The training scene must start automatically when your Unity application is launched by the training process. -* The scene must include an Academy with at least one Brain in the `Broadcast Hub` - with the `Control` checkbox checked. * The Academy must reset the scene to a valid starting point for each episode of training. * A training episode must have a definite end — either using `Max Steps` or by diff --git a/docs/Learning-Environment-Examples.md b/docs/Learning-Environment-Examples.md index 36f760e750..a404304a87 100644 --- a/docs/Learning-Environment-Examples.md +++ b/docs/Learning-Environment-Examples.md @@ -1,4 +1,4 @@ -# Example Learning Environments +# Example Learning Environments The Unity ML-Agents toolkit contains an expanding set of example environments which demonstrate various features of the platform. 
Environments are located in @@ -24,11 +24,11 @@ If you would like to contribute environments, please see our * Set-up: A linear movement task where the agent must move left or right to rewarding states. * Goal: Move to the most reward state. -* Agents: The environment contains one agent linked to a single Brain. +* Agents: The environment contains one agent. * Agent Reward Function: * +0.1 for arriving at suboptimal state. * +1.0 for arriving at optimal state. -* Brains: One Brain with the following observation/action space. +* Behavior Parameters: * Vector Observation space: One variable corresponding to current state. * Vector Action space: (Discrete) Two possible actions (Move left, move right). @@ -42,12 +42,12 @@ If you would like to contribute environments, please see our * Set-up: A balance-ball task, where the agent balances the ball on it's head. * Goal: The agent must balance the ball on it's head for as long as possible. -* Agents: The environment contains 12 agents of the same kind, all linked to a - single Brain. +* Agents: The environment contains 12 agents of the same kind, all using the + same Behavior Parameters. * Agent Reward Function: * +0.1 for every step the ball remains on it's head. * -1.0 if the ball falls off. -* Brains: One Brain with the following observation/action space. +* Behavior Parameters: * Vector Observation space: 8 variables corresponding to rotation of the agent cube, and position and velocity of ball. * Vector Observation space (Hard Version): 5 variables corresponding to @@ -60,7 +60,7 @@ If you would like to contribute environments, please see our * Default: 1 * Recommended Minimum: 0.2 * Recommended Maximum: 5 - * gravity: Magnitude of gravity + * gravity: Magnitude of gravity * Default: 9.81 * Recommended Minimum: 4 * Recommended Maximum: 105 @@ -78,12 +78,12 @@ If you would like to contribute environments, please see our and obstacles. * Goal: The agent must navigate the grid to the goal while avoiding the obstacles. 
-* Agents: The environment contains one agent linked to a single Brain. +* Agents: The environment contains nine agents with the same Behavior Parameters. * Agent Reward Function: * -0.01 for every step. * +1.0 if the agent navigates to the goal position of the grid (episode ends). * -1.0 if the agent navigates to an obstacle (episode ends). -* Brains: One Brain with the following observation/action space. +* Behavior Parameters: * Vector Observation space: None * Vector Action space: (Discrete) Size of 4, corresponding to movement in cardinal directions. Note that for this environment, @@ -104,13 +104,13 @@ If you would like to contribute environments, please see our net. * Goal: The agents must bounce ball between one another while not dropping or sending ball out of bounds. -* Agents: The environment contains two agent linked to a single Brain named - TennisBrain. After training you can attach another Brain named MyBrain to one - of the agent to play against your trained model. +* Agents: The environment contains two agents with the same Behavior Parameters. + After training you can check the `Use Heuristic` checkbox on one of the Agents + to play against your trained model. * Agent Reward Function (independent): * +0.1 To agent when hitting ball over net. * -0.1 To agent who let ball hit their ground, or hit ball out of bounds. -* Brains: One Brain with the following observation/action space. +* Behavior Parameters: * Vector Observation space: 8 variables corresponding to position and velocity of ball and racket. * Vector Action space: (Continuous) Size of 2, corresponding to movement @@ -119,7 +119,7 @@ If you would like to contribute environments, please see our * Reset Parameters: Three * angle: Angle of the racket from the vertical (Y) axis.
* Default: 55 - * Recommended Minimum: 35 + * Recommended Minimum: 35 * Recommended Maximum: 65 * gravity: Magnitude of gravity * Default: 9.81 @@ -130,7 +130,6 @@ If you would like to contribute environments, please see our * Recommended Minimum: 0.2 * Recommended Maximum: 5 * Benchmark Mean Reward: 2.5 -* Optional Imitation Learning scene: `TennisIL`. ## [Push Block](https://youtu.be/jKdw216ZgoE) @@ -138,11 +137,11 @@ If you would like to contribute environments, please see our * Set-up: A platforming environment where the agent can push a block around. * Goal: The agent must push the block to the goal. -* Agents: The environment contains one agent linked to a single Brain. +* Agents: The environment contains one agent. * Agent Reward Function: * -0.0025 for every step. * +1.0 if the block touches the goal. -* Brains: One Brain with the following observation/action space. +* Behavior Parameters: * Vector Observation space: (Continuous) 70 variables corresponding to 14 ray-casts each detecting one of three possible objects (wall, goal, or block). @@ -170,7 +169,6 @@ If you would like to contribute environments, please see our * Recommended Minimum: 0 * Recommended Maximum: 2000 * Benchmark Mean Reward: 4.5 -* Optional Imitation Learning scene: `PushBlockIL`. ## [Wall Jump](https://youtu.be/NITLug2DIWQ) @@ -178,13 +176,14 @@ If you would like to contribute environments, please see our * Set-up: A platforming environment where the agent can jump over a wall. * Goal: The agent must use the block to scale the wall and reach the goal. -* Agents: The environment contains one agent linked to two different Brains. The - Brain the agent is linked to changes depending on the height of the wall. +* Agents: The environment contains one agent linked to two different + Models. The Policy the agent is linked to changes depending on the + height of the wall. The change of Policy is done in the WallJumpAgent class. * Agent Reward Function: * -0.0005 for every step. 
* +1.0 if the agent touches the goal. * -1.0 if the agent falls off the platform. -* Brains: Two Brains, each with the following observation/action space. +* Behavior Parameters: * Vector Observation space: Size of 74, corresponding to 14 ray casts each detecting 4 possible objects. plus the global position of the agent and whether or not the agent is grounded. @@ -195,7 +194,7 @@ If you would like to contribute environments, please see our * Jump (2 possible actions: Jump, No Action) * Visual Observations: None * Reset Parameters: Four -* Benchmark Mean Reward (Big & Small Wall Brain): 0.8 +* Benchmark Mean Reward (Big & Small Wall): 0.8 ## [Reacher](https://youtu.be/2N9EoF6pQyE) @@ -203,10 +202,10 @@ If you would like to contribute environments, please see our * Set-up: Double-jointed arm which can move to target locations. * Goal: The agents must move its hand to the goal location, and keep it there. -* Agents: The environment contains 10 agent linked to a single Brain. +* Agents: The environment contains 10 agents with the same Behavior Parameters. * Agent Reward Function (independent): * +0.1 Each step agent's hand is in goal location. -* Brains: One Brain with the following observation/action space. +* Behavior Parameters: * Vector Observation space: 26 variables corresponding to position, rotation, velocity, and angular velocities of the two arm Rigidbodies. * Vector Action space: (Continuous) Size of 4, corresponding to torque @@ -243,11 +242,11 @@ If you would like to contribute environments, please see our * Goal: The agents must move its body toward the goal direction without falling. * `CrawlerStaticTarget` - Goal direction is always forward. * `CrawlerDynamicTarget`- Goal direction is randomized. -* Agents: The environment contains 3 agent linked to a single Brain. +* Agents: The environment contains 3 agents with the same Behavior Parameters. * Agent Reward Function (independent): * +0.03 times body velocity in the goal direction.
* +0.01 times body direction alignment with goal direction. -* Brains: One Brain with the following observation/action space. +* Behavior Parameters: * Vector Observation space: 117 variables corresponding to position, rotation, velocity, and angular velocities of each limb plus the acceleration and angular acceleration of the body. @@ -265,11 +264,11 @@ If you would like to contribute environments, please see our * Set-up: A multi-agent environment where agents compete to collect food. * Goal: The agents must learn to collect as many green food spheres as possible while avoiding red spheres. -* Agents: The environment contains 5 agents linked to a single Brain. +* Agents: The environment contains 5 agents with same Behavior Parameters. * Agent Reward Function (independent): * +1 for interaction with green spheres * -1 for interaction with red spheres -* Brains: One Brain with the following observation/action space. +* Behavior Parameters: * Vector Observation space: 53 corresponding to velocity of agent (2), whether agent is frozen and/or shot its laser (2), plus ray-based perception of objects around agent's forward direction (49; 7 raycast angles with 7 @@ -293,7 +292,6 @@ If you would like to contribute environments, please see our * Recommended Minimum: 0.5 * Recommended Maximum: 5 * Benchmark Mean Reward: 10 -* Optional Imitation Learning scene: `FoodCollectorIL`. ## [Hallway](https://youtu.be/53GyfpPQRUQ) @@ -303,12 +301,12 @@ If you would like to contribute environments, please see our remember it, and use it to move to the correct goal. * Goal: Move to the goal which corresponds to the color of the block in the room. -* Agents: The environment contains one agent linked to a single Brain. +* Agents: The environment contains one agent. * Agent Reward Function (independent): * +1 For moving to correct goal. * -0.1 For moving to incorrect goal. * -0.0003 Existential penalty. 
-* Brains: One Brain with the following observation/action space: +* Behavior Parameters: * Vector Observation space: 30 corresponding to local ray-casts detecting objects, goals, and walls. * Vector Action space: (Discrete) 1 Branch, 4 actions corresponding to agent @@ -320,7 +318,6 @@ If you would like to contribute environments, please see our * Reset Parameters: None * Benchmark Mean Reward: 0.7 * To speed up training, you can enable curiosity by adding `use_curiosity: true` in `config/trainer_config.yaml` -* Optional Imitation Learning scene: `HallwayIL`. ## [Bouncer](https://youtu.be/Tkv-c-b1b2I) @@ -329,12 +326,12 @@ If you would like to contribute environments, please see our * Set-up: Environment where the agent needs on-demand decision making. The agent must decide how perform its next bounce only when it touches the ground. * Goal: Catch the floating green cube. Only has a limited number of jumps. -* Agents: The environment contains one agent linked to a single Brain. +* Agents: The environment contains one agent. * Agent Reward Function (independent): * +1 For catching the green cube. * -1 For bouncing out of bounds. * -0.05 Times the action squared. Energy expenditure penalty. -* Brains: One Brain with the following observation/action space: +* Behavior Parameters: * Vector Observation space: 6 corresponding to local position of agent and green cube. * Vector Action space: (Continuous) 3 corresponding to agent force applied for @@ -355,8 +352,8 @@ If you would like to contribute environments, please see our * Goal: * Striker: Get the ball into the opponent's goal. * Goalie: Prevent the ball from entering its own goal. -* Agents: The environment contains four agents, with two linked to one Brain - (strikers) and two linked to another (goalies). +* Agents: The environment contains four agents, with two different sets of + Behavior Parameters: Striker and Goalie. * Agent Reward Function (dependent): * Striker: * +1 When ball enters opponent's goal.
@@ -366,7 +363,7 @@ If you would like to contribute environments, please see our * -1 When ball enters team's goal. * +0.1 When ball enters opponents goal. * +0.001 Existential bonus. -* Brains: Two Brain with the following observation/action space: +* Behavior Parameters: * Vector Observation space: 112 corresponding to local 14 ray casts, each detecting 7 possible object types, along with the object's distance. Perception is in 180 degree view from front of agent. @@ -384,7 +381,7 @@ If you would like to contribute environments, please see our * Default: 9.81 * Recommended minimum: 6 * Recommended maximum: 20 -* Benchmark Mean Reward (Striker & Goalie Brain): 0 (the means will be inverse +* Benchmark Mean Reward (Striker & Goalie): 0 (the means will be inverse of each other and criss crosses during training) __Note that our trainer is currently unable to consistently train this environment__ ## Walker @@ -396,14 +393,13 @@ If you would like to contribute environments, please see our head, thighs, shins, feet, arms, forearms and hands. * Goal: The agents must move its body toward the goal direction as quickly as possible without falling. -* Agents: The environment contains 11 independent agent linked to a single - Brain. +* Agents: The environment contains 11 independent agents with same Behavior Parameters. * Agent Reward Function (independent): * +0.03 times body velocity in the goal direction. * +0.01 times head y position. * +0.01 times body direction alignment with goal direction. * -0.01 times head velocity difference from body velocity. -* Brains: One Brain with the following observation/action space. +* Behavior Parameters: * Vector Observation space: 215 variables corresponding to position, rotation, velocity, and angular velocities of each limb, along with goal direction. 
* Vector Action space: (Continuous) Size of 39, corresponding to target @@ -436,10 +432,10 @@ If you would like to contribute environments, please see our pyramid, then navigate to the pyramid, knock it over, and move to the gold brick at the top. * Goal: Move to the golden brick on top of the spawned pyramid. -* Agents: The environment contains one agent linked to a single Brain. +* Agents: The environment contains one agent. * Agent Reward Function (independent): * +2 For moving to golden brick (minus 0.001 per step). -* Brains: One Brain with the following observation/action space: +* Behavior Parameters: * Vector Observation space: 148 corresponding to local ray-casts detecting switch, bricks, golden brick, and walls, plus variable indicating switch state. @@ -450,5 +446,4 @@ If you would like to contribute environments, please see our this environment does not train with the provided default training parameters.__ * Reset Parameters: None -* Optional Imitation Learning scene: `PyramidsIL`. * Benchmark Mean Reward: 1.75 diff --git a/docs/Learning-Environment-Executable.md b/docs/Learning-Environment-Executable.md index 45fc574e3f..c6d527324b 100644 --- a/docs/Learning-Environment-Executable.md +++ b/docs/Learning-Environment-Executable.md @@ -27,14 +27,6 @@ environment: ![3DBall Scene](images/mlagents-Open3DBall.png) -Make sure the Brains in the scene have the right type. For example, if you want -to be able to control your agents from Python, you will need to put the Brain -controlling the Agents to be a **Learning Brain** and drag it into the -Academy's `Broadcast Hub` with the `Control` checkbox checked. In the 3DBall -scene, this can be done in the Platform GameObject within the Game prefab in -`Assets/ML-Agents/Examples/3DBall/Prefabs/`, or in each instance of the -Platform in the Scene. - Next, we want the set up scene to play correctly when the training process launches our environment executable. 
This means: @@ -201,19 +193,18 @@ INFO:mlagents.trainers: first-run-0: Ball3DLearning: Step: 10000. Mean Reward: 2 ``` You can press Ctrl+C to stop the training, and your trained model will be at -`models//.nn`, which corresponds +`models//.nn`, which corresponds to your model's latest checkpoint. (**Note:** There is a known bug on Windows that causes the saving of the model to fail when you early terminate the training, it's recommended to wait until Step has reached the max_steps parameter you set in trainer_config.yaml.) You can now embed this trained model -into your Learning Brain by following the steps below: +into your Agent by following the steps below: 1. Move your model file into `UnitySDK/Assets/ML-Agents/Examples/3DBall/TFModels/`. 2. Open the Unity Editor, and select the **3DBall** scene as described above. -3. Select the **Ball3DLearning** object from the Project window. -5. Drag the `.nn` file from the Project window of - the Editor to the **Model** placeholder in the **Ball3DLearning** +3. Select the **3DBall** prefab from the Project window and select **Agent**. +5. Drag the `.nn` file from the Project window of + the Editor to the **Model** placeholder in the **Ball3DAgent** inspector window. -6. Remove the **Ball3DLearning** from the Academy's `Broadcast Hub` -7. Press the Play button at the top of the editor. +6. Press the Play button at the top of the editor. 
diff --git a/docs/ML-Agents-Overview.md b/docs/ML-Agents-Overview.md index f194c64ef0..4728d6913d 100644 --- a/docs/ML-Agents-Overview.md +++ b/docs/ML-Agents-Overview.md @@ -131,36 +131,31 @@ components: _Simplified block diagram of ML-Agents._ -The Learning Environment contains three additional components that help +The Learning Environment contains two additional components that help organize the Unity scene: - **Agents** - which is attached to a Unity GameObject (any character within a scene) and handles generating its observations, performing the actions it receives and assigning a reward (positive / negative) when appropriate. Each - Agent is linked to exactly one Brain. -- **Brains** - which encapsulates the logic for making decisions for the Agent. - In essence, the Brain is what holds on to the policy for each Agent and - determines which actions the Agent should take at each instance. More - specifically, it is the component that receives the observations and rewards - from the Agent and returns an action. + Agent is linked to a Policy. - **Academy** - which orchestrates the observation and decision making process. Within the Academy, several environment-wide parameters such as the rendering quality and the speed at which the environment is run can be specified. The External Communicator lives within the Academy. Every Learning Environment will always have one global Academy and one Agent for -every character in the scene. While each Agent must be linked to a Brain, it is -possible for Agents that have similar observations and actions to be linked to -the same Brain. In our sample game, we have two teams each with their own medic. +every character in the scene. While each Agent must be linked to a Policy, it is +possible for Agents that have similar observations and actions to have +the same Policy type. In our sample game, we have two teams each with their own medic. 
Thus we will have two Agents in our Learning Environment, one for each medic, -but both of these medics can be linked to the same Brain. Note that these two -medics are linked to the same Brain because their _space_ of observations and +but both of these medics can have the same Policy. Note that these two +medics have the same Policy because their _space_ of observations and actions are similar. This does not mean that at each instance they will have -identical observation and action _values_. In other words, the Brain defines the +identical observation and action _values_. In other words, the Policy defines the space of all possible observations and actions, while the Agents connected to it (in this case the medics) can each have their own, unique observation and action values. If we expanded our game to include tank driver NPCs, then the Agent -attached to those characters cannot share a Brain with the Agent linked to the +attached to those characters cannot share a Policy with the Agent linked to the medics (medics and drivers have different actions).

@@ -174,46 +169,11 @@ _Example block diagram of ML-Agents toolkit for our sample game._ We have yet to discuss how the ML-Agents toolkit trains behaviors, and what role the Python API and External Communicator play. Before we dive into those details, let's summarize the earlier components. Each character is attached to -an Agent, and each Agent is linked to a Brain. The Brain receives observations +an Agent, and each Agent has a Policy. The Policy receives observations and rewards from the Agent and returns actions. The Academy ensures that all the -Agents and Brains are in sync in addition to controlling environment-wide -settings. So how does the Brain control what the Agent does? - -In practice, we have three different categories of Brains, which enable a wide -range of training and inference scenarios: - -- **Learning** - where decisions are made using an embedded - [TensorFlow](Background-TensorFlow.md) model. The embedded TensorFlow model - represents a learned policy and the Brain directly uses this model to - determine the action for each Agent. You can train a **Learning Brain** - by dragging it into the Academy's `Broadcast Hub` with the `Control` - checkbox checked. -- **Player** - where decisions are made using real input from a keyboard or - controller. Here, a human player is controlling the Agent and the observations - and rewards collected by the Brain are not used to control the Agent. -- **Heuristic** - where decisions are made using hard-coded behavior. This - resembles how most character behaviors are currently defined and can be - helpful for debugging or comparing how an Agent with hard-coded rules compares - to an Agent whose behavior has been trained. In our example, once we have - trained a Brain for the medics we could assign a medic on one team to the - trained Brain and assign the medic on the other team a Heuristic Brain with - hard-coded behaviors. We can then evaluate which medic is more effective. 
- -As currently described, it may seem that the External Communicator and Python -API are only leveraged by the Learning Brain. This is not true. It is possible -to configure the Learning, Player and Heuristic Brains to also send the -observations, rewards and actions to the Python API through the External -Communicator (a feature called _broadcasting_). As we will see shortly, this -enables additional training modes. +Agents are in sync in addition to controlling environment-wide +settings. -

- ML-Agents Scene Block Diagram -

- -_An example of how a scene containing multiple Agents and Brains might be -configured._ ## Training Modes @@ -224,27 +184,24 @@ inference can proceed. As mentioned previously, the ML-Agents toolkit ships with several implementations of state-of-the-art algorithms for training intelligent agents. -In this mode, the only Brain used is a **Learning Brain**. More -specifically, during training, all the medics in the +More specifically, during training, all the medics in the scene send their observations to the Python API through the External -Communicator (this is the behavior with an External Brain). The Python API +Communicator. The Python API processes these observations and sends back actions for each medic to take. During training these actions are mostly exploratory to help the Python API learn the best policy for each medic. Once training concludes, the learned policy for each medic can be exported. Given that all our implementations are based on TensorFlow, the learned policy is just a TensorFlow model file. Then -during the inference phase, we use the **Learning Brain** in internal mode -and include the +during the inference phase, we use the TensorFlow model generated from the training phase. Now during the inference -phase, the medics still continue to generate their observations, but instead of +phase, the medics still continue to generate their observations, but instead of being sent to the Python API, they will be fed into their (internal, embedded) model to generate the _optimal_ action for each medic to take at every point in time. To summarize: our built-in implementations are based on TensorFlow, thus, during training the Python API uses the observations it receives to learn a TensorFlow -model. This model is then embedded within the Learning Brain during inference to -generate the optimal actions for all Agents linked to that Brain. +model. This model is then embedded within the Agent during inference. 
The [Getting Started with the 3D Balance Ball Example](Getting-Started-with-Balance-Ball.md) @@ -252,12 +209,11 @@ tutorial covers this training mode with the **3D Balance Ball** sample environme ### Custom Training and Inference -In the previous mode, the Learning Brain was used for training to generate -a TensorFlow model that the Learning Brain can later use. However, +In the previous mode, the Agents were used for training to generate +a TensorFlow model that the Agents can later use. However, any user of the ML-Agents toolkit can leverage their own algorithms for -training. In this case, the Brain type would be set to Learning and be linked -to the BroadcastHub (with checked `Control` checkbox) -and the behaviors of all the Agents in the scene will be controlled within Python. +training. In this case, the behaviors of all the Agents in the scene +will be controlled within Python. You can even turn your environment into a [gym.](../gym-unity/README.md) We do not currently have a tutorial highlighting this mode, but you can @@ -312,15 +268,15 @@ It is often more intuitive to simply demonstrate the behavior we want an agent to perform, rather than attempting to have it learn via trial-and-error methods. For example, instead of training the medic by setting up its reward function, this mode allows providing real examples from a game controller on how the medic -should behave. More specifically, in this mode, the Brain type during training -is set to Player and all the actions performed with the controller (in addition -to the agent observations) will be recorded and sent to the Python API. The +should behave. More specifically, in this mode, the Agent must use its heuristic +to generate action, and all the actions performed with the controller (in addition +to the agent observations) will be recorded. The imitation learning algorithm will then use these pairs of observations and actions from the human player to learn a policy. 
[Video Link](https://youtu.be/kpb8ZkMBFYs). The toolkit provides a way to learn directly from demonstrations, as well as use them -to help speed up reward-based training (RL). We include two algorithms called +to help speed up reward-based training (RL). We include two algorithms called Behavioral Cloning (BC) and Generative Adversarial Imitation Learning (GAIL). The [Training with Imitation Learning](Training-Imitation-Learning.md) tutorial covers these features in more depth. @@ -333,35 +289,35 @@ kinds of novel and fun environments the community creates. For those new to training intelligent agents, below are a few examples that can serve as inspiration: -- Single-Agent. A single agent linked to a single Brain, with its own reward +- Single-Agent. A single agent, with its own reward signal. The traditional way of training an agent. An example is any single-player game, such as Chicken. [Video Link](https://www.youtube.com/watch?v=fiQsmdwEGT8&feature=youtu.be). - Simultaneous Single-Agent. Multiple independent agents with independent reward - signals linked to a single Brain. A parallelized version of the traditional + signals with same `Behavior Parameters`. A parallelized version of the traditional training scenario, which can speed-up and stabilize the training process. Helpful when you have multiple versions of the same character in an environment who should learn similar behaviors. An example might be training a dozen robot-arms to each open a door simultaneously. [Video Link](https://www.youtube.com/watch?v=fq0JBaiCYNA). -- Adversarial Self-Play. Two interacting agents with inverse reward signals - linked to a single Brain. In two-player games, adversarial self-play can allow +- Adversarial Self-Play. Two interacting agents with inverse reward signals. + In two-player games, adversarial self-play can allow an agent to become increasingly more skilled, while always having the perfectly matched opponent: itself. 
This was the strategy employed when training AlphaGo, and more recently used by OpenAI to train a human-beating 1-vs-1 Dota 2 agent. - Cooperative Multi-Agent. Multiple interacting agents with a shared reward - signal linked to either a single or multiple different Brains. In this + signal with same or different `Behavior Parameters`. In this scenario, all agents must work together to accomplish a task that cannot be done alone. Examples include environments where each agent only has access to partial information, which needs to be shared in order to accomplish the task or collaboratively solve a puzzle. - Competitive Multi-Agent. Multiple interacting agents with inverse reward - signals linked to either a single or multiple different Brains. In this + signals with same or different `Behavior Parameters`. In this scenario, agents must compete with one another to either win a competition, or obtain some limited set of resources. All team sports fall into this scenario. -- Ecosystem. Multiple interacting agents with independent reward signals linked - to either a single or multiple different Brains. This scenario can be thought +- Ecosystem. Multiple interacting agents with independent reward signals with + same or different `Behavior Parameters`. This scenario can be thought of as creating a small world in which animals with different goals all interact, such as a savanna in which there might be zebras, elephants and giraffes, or an autonomous driving simulation within an urban environment. @@ -416,24 +372,6 @@ training process. [Training Generalized Reinforcement Learning Agents](Training-Generalized-Reinforcement-Learning-Agents.md) to learn more about this feature. -- **Broadcasting** - As discussed earlier, a Learning Brain sends the - observations for all its Agents to the Python API when dragged into the - Academy's `Broadcast Hub` with the `Control` checkbox checked. This is helpful - for training and later inference. 
Broadcasting is a feature which can be - enabled all types of Brains (Player, Learning, Heuristic) where the Agent - observations and actions are also sent to the Python API (despite the fact - that the Agent is **not** controlled by the Python API). This feature is - leveraged by Imitation Learning, where the observations and actions for a - Player Brain are used to learn the policies of an agent through demonstration. - However, this could also be helpful for the Heuristic and Learning Brains, - particularly when debugging agent behaviors. You can learn more about using - the broadcasting feature - [here](Learning-Environment-Design-Brains.md#using-the-broadcast-feature). - -- **Docker Set-up (Experimental)** - To facilitate setting up ML-Agents without - installing Python or TensorFlow directly, we provide a - [guide](Using-Docker.md) on how to create and run a Docker container. - - **Cloud Training on AWS** - To facilitate using the ML-Agents toolkit on Amazon Web Services (AWS) machines, we provide a [guide](Training-on-Amazon-Web-Service.md) on how to set-up EC2 instances in diff --git a/docs/Migrating.md b/docs/Migrating.md index 409dc3a92d..98b4f5100a 100644 --- a/docs/Migrating.md +++ b/docs/Migrating.md @@ -1,5 +1,22 @@ # Migrating +## Migrating from ML-Agents toolkit v0.10 to v0.11 + +### Important Changes +* The definition of the gRPC service has changed. +* The online BC training feature has been removed. +* The BroadcastHub has been deprecated. If there is a training Python process, all LearningBrains in the scene will automatically be trained. If there is no Python process, inference will be used. +* The Brain ScriptableObjects have been deprecated. The Brain Parameters are now on the Agent and are referred to as Behavior Parameters. Make sure the Behavior Parameters is attached to the Agent GameObject. +* Several changes were made to the setup for visual observations (i.e. 
using Cameras or RenderTextures): + * Camera resolutions are no longer stored in the Brain Parameters. + * AgentParameters no longer stores lists of Cameras and RenderTextures + * To add visual observations to an Agent, you must now attach a CameraSensorComponent or RenderTextureComponent to the agent. The corresponding Camera or RenderTexture can be added to these in the editor, and the resolution and color/grayscale is configured on the component itself. + +#### Steps to Migrate +* In order to be able to train, make sure both your ML-Agents Python package and UnitySDK code come from the v0.11 release. Training will not work, for example, if you update the ML-Agents Python package, and only update the API Version in UnitySDK. +* If your Agents used visual observations, you must add a CameraSensorComponent corresponding to each old Camera in the Agent's camera list (and similarly for RenderTextures). +* Since Brain ScriptableObjects have been removed, you will need to delete all the Brain ScriptableObjects from your `Assets` folder. Then, add a `Behavior Parameters` component to each `Agent` GameObject. You will then need to complete the fields on the new `Behavior Parameters` component with the BrainParameters of the old Brain. + ## Migrating from ML-Agents toolkit v0.9 to v0.10 ### Important Changes @@ -37,7 +54,7 @@ You may need to change `max_steps` in your config as appropriate as well. ## Migrating from ML-Agents toolkit v0.7 to v0.8 ### Important Changes -* We have split the Python packges into two seperate packages `ml-agents` and `ml-agents-envs`. +* We have split the Python packages into two separate packages `ml-agents` and `ml-agents-envs`. * `--worker-id` option of `learn.py` has been removed, use `--base-port` instead if you'd like to run multiple instances of `learn.py`. 
#### Steps to Migrate diff --git a/docs/Python-API.md b/docs/Python-API.md index d3fd7f8070..c97a1e554e 100644 --- a/docs/Python-API.md +++ b/docs/Python-API.md @@ -33,11 +33,9 @@ These classes are all defined in the `ml-agents/mlagents/envs` folder of the ML-Agents SDK. To communicate with an Agent in a Unity environment from a Python program, the -Agent must either use a Brain present in the Academy's `Broadcast Hub`. +Agent must use a LearningBrain. Your code is expected to return -actions for Agents with Brains with the `Control` checkbox of the -Academy's `Broadcast Hub` checked, but can only observe broadcasting -Brains (the information you receive for an Agent is the same in both cases). +actions for Agents with LearningBrains. _Notice: Currently communication between Unity and Python takes place over an open socket without authentication. As such, please make sure that the network @@ -99,10 +97,10 @@ variable named `env` in this example, can be used in the following way: - **Print : `print(str(env))`** Prints all parameters relevant to the loaded environment and the Brains. -- **Reset : `env.reset(train_model=True, config=None)`** +- **Reset : `env.reset(train_mode=True, config=None)`** Send a reset signal to the environment, and provides a dictionary mapping Brain names to BrainInfo objects. - - `train_model` indicates whether to run the environment in train (`True`) or + - `train_mode` indicates whether to run the environment in train (`True`) or test (`False`) mode. - `config` is an optional dictionary of configuration flags specific to the environment. For generic environments, `config` can be ignored. 
`config` is @@ -130,8 +128,7 @@ variable named `env` in this example, can be used in the following way: observations = brainInfo.vector_observations ``` - Note that if you have more than one Brain in the Academy's `Broadcast Hub` with - the `Control` checkbox checked, you + Note that if you have more than one LearningBrain in the scene, you must provide dictionaries from Brain names to arrays for `action`, `memory` and `value`. For example: If you have two Learning Brains named `brain1` and `brain2` each with one Agent taking two continuous actions, then you can diff --git a/docs/Readme.md b/docs/Readme.md index 4bbd802ad2..d88962a043 100644 --- a/docs/Readme.md +++ b/docs/Readme.md @@ -4,7 +4,7 @@ * [Installation](Installation.md) * [Background: Jupyter Notebooks](Background-Jupyter.md) - * [Docker Set-up](Using-Docker.md) + * [Using Virtual Environment](Using-Virtual-Environment.md) * [Basic Guide](Basic-Guide.md) ## Getting Started @@ -22,29 +22,36 @@ * [Designing a Learning Environment](Learning-Environment-Design.md) * [Agents](Learning-Environment-Design-Agents.md) * [Academy](Learning-Environment-Design-Academy.md) - * [Brains](Learning-Environment-Design-Brains.md): - [Player](Learning-Environment-Design-Player-Brains.md), - [Heuristic](Learning-Environment-Design-Heuristic-Brains.md), - [Learning](Learning-Environment-Design-Learning-Brains.md) * [Learning Environment Best Practices](Learning-Environment-Best-Practices.md) -* [Using the Monitor](Feature-Monitor.md) -* [Using the Video Recorder](https://github.com/Unity-Technologies/video-recorder) -* [Using an Executable Environment](Learning-Environment-Executable.md) -* [Creating Custom Protobuf Messages](Creating-Custom-Protobuf-Messages.md) + +### Advanced Usage + * [Using the Monitor](Feature-Monitor.md) + * [Using the Video Recorder](https://github.com/Unity-Technologies/video-recorder) + * [Using an Executable Environment](Learning-Environment-Executable.md) + * [Creating Custom Protobuf 
Messages](Creating-Custom-Protobuf-Messages.md)

## Training

* [Training ML-Agents](Training-ML-Agents.md)
+* [Using TensorBoard to Observe Training](Using-Tensorboard.md)
+* [Training Using Concurrent Unity Instances](Training-Using-Concurrent-Unity-Instances.md)
* [Training with Proximal Policy Optimization](Training-PPO.md)
* [Training with Soft Actor-Critic](Training-SAC.md)
+
+### Advanced Training Methods
+
* [Training with Curriculum Learning](Training-Curriculum-Learning.md)
* [Training with Imitation Learning](Training-Imitation-Learning.md)
* [Training with LSTM](Feature-Memory.md)
* [Training Generalized Reinforcement Learning Agents](Training-Generalized-Reinforcement-Learning-Agents.md)
+
+### Cloud Training (Deprecated)
+Here are the cloud training set-up guides for Azure and AWS. We no longer use them ourselves and
+so they may not work correctly. We've decided to keep them up just in case they are helpful to
+you.
+
* [Training on the Cloud with Amazon Web Services](Training-on-Amazon-Web-Service.md)
* [Training on the Cloud with Microsoft Azure](Training-on-Microsoft-Azure.md)
-* [Training Using Concurrent Unity Instances](Training-Using-Concurrent-Unity-Instances.md)
-* [Using TensorBoard to Observe Training](Using-Tensorboard.md)

## Inference
diff --git a/docs/Reward-Signals.md b/docs/Reward-Signals.md
index 34c80259bf..7adbcf4861 100644
--- a/docs/Reward-Signals.md
+++ b/docs/Reward-Signals.md
@@ -5,7 +5,7 @@ that maximizes a reward. Typically, a reward is defined by your environment, and
to reaching some goal. These are what we refer to as "extrinsic" rewards, as
they are defined external of the learning algorithm.

-Rewards, however, can be defined outside of the enviroment as well, to encourage the agent to
+Rewards, however, can be defined outside of the environment as well, to encourage the agent to
behave in certain ways, or to aid the learning of the true extrinsic reward. We refer to these
rewards as "intrinsic" reward signals. 
The total reward that the agent will learn to maximize can be a mix of extrinsic and intrinsic reward signals. @@ -19,7 +19,7 @@ The `curiosity` reward signal helps your agent explore when extrinsic rewards ar Reward signals, like other hyperparameters, are defined in the trainer config `.yaml` file. An example is provided in `config/trainer_config.yaml` and `config/gail_config.yaml`. To enable a reward signal, add it to the -`reward_signals:` section under the brain name. For instance, to enable the extrinsic signal +`reward_signals:` section under the behavior name. For instance, to enable the extrinsic signal in addition to a small curiosity reward and a GAIL reward signal, you would define your `reward_signals` as follows: ```yaml @@ -74,9 +74,9 @@ Typical Range: `0.8` - `0.995` The `curiosity` Reward Signal enables the Intrinsic Curiosity Module. This is an implementation of the approach described in "Curiosity-driven Exploration by Self-supervised Prediction" by Pathak, et al. It trains two networks: -* an inverse model, which takes the current and next obersvation of the agent, encodes them, and +* an inverse model, which takes the current and next observation of the agent, encodes them, and uses the encoding to predict the action that was taken between the observations -* a forward model, which takes the encoded current obseravation and action, and predicts the +* a forward model, which takes the encoded current observation and action, and predicts the next encoded observation. The loss of the forward model (the difference between the predicted and actual encoded observations) is used as the intrinsic reward, so the more surprised the model is, the larger the reward will be. 
diff --git a/docs/Training-Behavioral-Cloning.md b/docs/Training-Behavioral-Cloning.md index 427c8db515..bdca019eae 100644 --- a/docs/Training-Behavioral-Cloning.md +++ b/docs/Training-Behavioral-Cloning.md @@ -1,92 +1,30 @@ # Training with Behavioral Cloning -There are a variety of possible imitation learning algorithms which can -be used, the simplest one of them is Behavioral Cloning. It works by collecting -demonstrations from a teacher, and then simply uses them to directly learn a -policy, in the same way the supervised learning for image classification +There are a variety of possible imitation learning algorithms which can +be used, the simplest one of them is Behavioral Cloning. It works by collecting +demonstrations from a teacher, and then simply uses them to directly learn a +policy, in the same way the supervised learning for image classification or other traditional Machine Learning tasks work. ## Offline Training -With offline behavioral cloning, we can use demonstrations (`.demo` files) +With offline behavioral cloning, we can use demonstrations (`.demo` files) generated using the `Demonstration Recorder` as the dataset used to train a behavior. -1. Choose an agent you would like to learn to imitate some set of demonstrations. -2. Record a set of demonstration using the `Demonstration Recorder` (see [here](Training-Imitation-Learning.md)). - For illustrative purposes we will refer to this file as `AgentRecording.demo`. -3. Build the scene, assigning the agent a Learning Brain, and set the Brain to - Control in the Broadcast Hub. For more information on Brains, see - [here](Learning-Environment-Design-Brains.md). -4. Open the `config/offline_bc_config.yaml` file. -5. Modify the `demo_path` parameter in the file to reference the path to the - demonstration file recorded in step 2. In our case this is: +1. Choose an agent you would like to learn to imitate some set of demonstrations. +2. 
Record a set of demonstration using the `Demonstration Recorder` (see [here](Training-Imitation-Learning.md)).
+   For illustrative purposes we will refer to this file as `AgentRecording.demo`.
+3. Build the scene (make sure the Agent is not using its heuristic).
+4. Open the `config/offline_bc_config.yaml` file.
+5. Modify the `demo_path` parameter in the file to reference the path to the
+   demonstration file recorded in step 2. In our case this is:
   `./UnitySDK/Assets/Demonstrations/AgentRecording.demo`
-6. Launch `mlagent-learn`, providing `./config/offline_bc_config.yaml`
-   as the config parameter, and include the `--run-id` and `--train` as usual.
-   Provide your environment as the `--env` parameter if it has been compiled
+6. Launch `mlagents-learn`, providing `./config/offline_bc_config.yaml`
+   as the config parameter, and include the `--run-id` and `--train` as usual.
+   Provide your environment as the `--env` parameter if it has been compiled
   as standalone, or omit to train in the editor.
7. (Optional) Observe training performance using TensorBoard.

-This will use the demonstration file to train a neural network driven agent
-to directly imitate the actions provided in the demonstration. The environment
+This will use the demonstration file to train a neural network driven agent
+to directly imitate the actions provided in the demonstration. The environment
will launch and be used for evaluating the agent's performance during training.
-
-## Online Training
-
-It is also possible to provide demonstrations in realtime during training,
-without pre-recording a demonstration file. The steps to do this are as follows:
-
-1. First create two Brains, one which will be the "Teacher," and the other which
-   will be the "Student." We will assume that the names of the Brain
-   Assets are "Teacher" and "Student" respectively.
-2. The "Teacher" Brain must be a **Player Brain**. You must properly
-   configure the inputs to map to the corresponding actions.
-3. 
The "Student" Brain must be a **Learning Brain**. -4. The Brain Parameters of both the "Teacher" and "Student" Brains must be - compatible with the agent. -5. Drag both the "Teacher" and "Student" Brain into the Academy's `Broadcast Hub` - and check the `Control` checkbox on the "Student" Brain. -6. Link the Brains to the desired Agents (one Agent as the teacher and at least - one Agent as a student). -7. In `config/online_bc_config.yaml`, add an entry for the "Student" Brain. Set - the `trainer` parameter of this entry to `online_bc`, and the - `brain_to_imitate` parameter to the name of the teacher Brain: "Teacher". - Additionally, set `batches_per_epoch`, which controls how much training to do - each moment. Increase the `max_steps` option if you'd like to keep training - the Agents for a longer period of time. -8. Launch the training process with `mlagents-learn config/online_bc_config.yaml - --train --slow`, and press the :arrow_forward: button in Unity when the - message _"Start training by pressing the Play button in the Unity Editor"_ is - displayed on the screen -9. From the Unity window, control the Agent with the Teacher Brain by providing - "teacher demonstrations" of the behavior you would like to see. -10. Watch as the Agent(s) with the student Brain attached begin to behave - similarly to the demonstrations. -11. Once the Student Agents are exhibiting the desired behavior, end the training - process with `CTL+C` from the command line. -12. Move the resulting `*.nn` file into the `TFModels` subdirectory of the - Assets folder (or a subdirectory within Assets of your choosing) , and use - with `Learning` Brain. - -**BC Teacher Helper** - -We provide a convenience utility, `BC Teacher Helper` component that you can add -to the Teacher Agent. - -

- BC Teacher Helper -

- -This utility enables you to use keyboard shortcuts to do the following: - -1. To start and stop recording experiences. This is useful in case you'd like to - interact with the game _but not have the agents learn from these - interactions_. The default command to toggle this is to press `R` on the - keyboard. - -2. Reset the training buffer. This enables you to instruct the agents to forget - their buffer of recent experiences. This is useful if you'd like to get them - to quickly learn a new behavior. The default command to reset the buffer is - to press `C` on the keyboard. diff --git a/docs/Training-Curriculum-Learning.md b/docs/Training-Curriculum-Learning.md index e831987414..9e6ba0a574 100644 --- a/docs/Training-Curriculum-Learning.md +++ b/docs/Training-Curriculum-Learning.md @@ -31,9 +31,10 @@ accomplish tasks otherwise much more difficult. ## How-To -Each Brain in an environment can have a corresponding curriculum. These +Each group of Agents under the same `Behavior Name` in an environment can have +a corresponding curriculum. These curriculums are held in what we call a metacurriculum. A metacurriculum allows -different Brains to follow different curriculums within the same environment. +different groups of Agents to follow different curriculums within the same environment. ### Specifying a Metacurriculum @@ -53,7 +54,7 @@ describes the structure of the curriculum. Within it, we can specify which points in the training process our wall height will change, either based on the percentage of training steps which have taken place, or what the average reward the agent has received in the recent past is. Below is an example curriculum for -the BigWallBrain in the Wall Jump environment. +the BigWallBehavior in the Wall Jump environment. ```json { @@ -99,9 +100,10 @@ and modify the environment from the Agent's `AgentReset()` function. See for an example. We will save this file into our metacurriculum folder with the name of its -corresponding Brain. 
For example, in the Wall Jump environment, there are two
-Brains---BigWallBrain and SmallWallBrain. If we want to define a curriculum for
-the BigWallBrain, we will save `BigWallBrain.json` into
+corresponding `Behavior Name`. For example, in the Wall Jump environment, there are two
+different `Behavior Names` set via script in `WallJumpAgent.cs`
+---BigWallBrainLearning and SmallWallBrainLearning. If we want to define a curriculum for
+the BigWallBrainLearning, we will save `BigWallBrainLearning.json` into
`config/curricula/wall-jump/`.

### Training with a Curriculum
diff --git a/docs/Training-Generalized-Reinforcement-Learning-Agents.md b/docs/Training-Generalized-Reinforcement-Learning-Agents.md
index 29210781ce..d2ddd950f9 100644
--- a/docs/Training-Generalized-Reinforcement-Learning-Agents.md
+++ b/docs/Training-Generalized-Reinforcement-Learning-Agents.md
@@ -2,15 +2,15 @@

One of the challenges of training and testing agents on the same
environment is that the agents tend to overfit. The result is that the
-agents are unable to generalize to any tweaks or variations in the enviornment.
-This is analgous to a model being trained and tested on an identical dataset
+agents are unable to generalize to any tweaks or variations in the environment.
+This is analogous to a model being trained and tested on an identical dataset
in supervised learning. This becomes problematic in cases where environments
-are randomly instantiated with varying objects or properties.
+are randomly instantiated with varying objects or properties. 

To make agents robust and generalizable to different environments, the agent
-should be trained over multiple variations of the enviornment. Using this approach
+should be trained over multiple variations of the environment. 
Using this approach for training, the agent will be better suited to adapt (with higher performance) -to future unseen variations of the enviornment +to future unseen variations of the environment _Example of variations of the 3D Ball environment._ @@ -29,19 +29,19 @@ in the figure above, the reset parameters are `gravity`, `ball_mass` and `ball_s ## How to Enable Generalization Using Reset Parameters We first need to provide a way to modify the environment by supplying a set of `Reset Parameters` -and vary them over time. This provision can be done either deterministically or randomly. +and vary them over time. This provision can be done either deterministically or randomly. -This is done by assigning each `Reset Parameter` a `sampler-type`(such as a uniform sampler), +This is done by assigning each `Reset Parameter` a `sampler-type`(such as a uniform sampler), which determines how to sample a `Reset Parameter`. If a `sampler-type` isn't provided for a -`Reset Parameter`, the parameter maintains the default value throughout the -training procedure, remaining unchanged. The samplers for all the `Reset Parameters` -are handled by a **Sampler Manager**, which also handles the generation of new -values for the reset parameters when needed. - -To setup the Sampler Manager, we create a YAML file that specifies how we wish to -generate new samples for each `Reset Parameters`. In this file, we specify the samplers and the -`resampling-interval` (the number of simulation steps after which reset parameters are +`Reset Parameter`, the parameter maintains the default value throughout the +training procedure, remaining unchanged. The samplers for all the `Reset Parameters` +are handled by a **Sampler Manager**, which also handles the generation of new +values for the reset parameters when needed. + +To setup the Sampler Manager, we create a YAML file that specifies how we wish to +generate new samples for each `Reset Parameters`. 
In this file, we specify the samplers and the +`resampling-interval` (the number of simulation steps after which reset parameters are resampled). Below is an example of a sampler file for the 3D ball environment. ```yaml @@ -65,23 +65,23 @@ scale: Below is the explanation of the fields in the above example. -* `resampling-interval` - Specifies the number of steps for the agent to -train under a particular environment configuration before resetting the +* `resampling-interval` - Specifies the number of steps for the agent to +train under a particular environment configuration before resetting the environment with a new sample of `Reset Parameters`. -* `Reset Parameter` - Name of the `Reset Parameter` like `mass`, `gravity` and `scale`. This should match the name -specified in the academy of the intended environment for which the agent is -being trained. If a parameter specified in the file doesn't exist in the +* `Reset Parameter` - Name of the `Reset Parameter` like `mass`, `gravity` and `scale`. This should match the name +specified in the academy of the intended environment for which the agent is +being trained. If a parameter specified in the file doesn't exist in the environment, then this parameter will be ignored. Within each `Reset Parameter` - * `sampler-type` - Specify the sampler type to use for the `Reset Parameter`. - This is a string that should exist in the `Sampler Factory` (explained + * `sampler-type` - Specify the sampler type to use for the `Reset Parameter`. + This is a string that should exist in the `Sampler Factory` (explained below). - * `sampler-type-sub-arguments` - Specify the sub-arguments depending on the `sampler-type`. - In the example above, this would correspond to the `intervals` - under the `sampler-type` `"multirange_uniform"` for the `Reset Parameter` called gravity`. - The key name should match the name of the corresponding argument in the sampler definition. 
+ * `sampler-type-sub-arguments` - Specify the sub-arguments depending on the `sampler-type`. + In the example above, this would correspond to the `intervals` + under the `sampler-type` `"multirange_uniform"` for the `Reset Parameter` called gravity`. + The key name should match the name of the corresponding argument in the sampler definition. (See below) The Sampler Manager allocates a sampler type for each `Reset Parameter` by using the *Sampler Factory*, @@ -93,29 +93,29 @@ to be used for each `Reset Parameter` is available in the Sampler Factory. Below is a list of included `sampler-type` as part of the toolkit. * `uniform` - Uniform sampler - * Uniformly samples a single float value between defined endpoints. - The sub-arguments for this sampler to specify the interval - endpoints are as below. The sampling is done in the range of + * Uniformly samples a single float value between defined endpoints. + The sub-arguments for this sampler to specify the interval + endpoints are as below. The sampling is done in the range of [`min_value`, `max_value`). * **sub-arguments** - `min_value`, `max_value` -* `gaussian` - Gaussian sampler +* `gaussian` - Gaussian sampler * Samples a single float value from the distribution characterized by - the mean and standard deviation. The sub-arguments to specify the + the mean and standard deviation. The sub-arguments to specify the gaussian distribution to use are as below. * **sub-arguments** - `mean`, `st_dev` * `multirange_uniform` - Multirange uniform sampler - * Uniformly samples a single float value between the specified intervals. - Samples by first performing a weight pick of an interval from the list - of intervals (weighted based on interval width) and samples uniformly - from the selected interval (half-closed interval, same as the uniform - sampler). This sampler can take an arbitrary number of intervals in a - list in the following format: + * Uniformly samples a single float value between the specified intervals. 
+ Samples by first performing a weight pick of an interval from the list + of intervals (weighted based on interval width) and samples uniformly + from the selected interval (half-closed interval, same as the uniform + sampler). This sampler can take an arbitrary number of intervals in a + list in the following format: [[`interval_1_min`, `interval_1_max`], [`interval_2_min`, `interval_2_max`], ...] - + * **sub-arguments** - `intervals` The implementation of the samplers can be found at `ml-agents-envs/mlagents/envs/sampler_class.py`. @@ -124,7 +124,7 @@ The implementation of the samplers can be found at `ml-agents-envs/mlagents/envs If you want to define your own sampler type, you must first inherit the *Sampler* base class (included in the `sampler_class` file) and preserve the interface. -Once the class for the required method is specified, it must be registered in the Sampler Factory. +Once the class for the required method is specified, it must be registered in the Sampler Factory. This can be done by subscribing to the *register_sampler* method of the SamplerFactory. The command is as follows: @@ -164,7 +164,7 @@ our configured sampler file with the `--sampler` flag. For example, if we wanted sampling setup, we would run ```sh -mlagents-learn config/trainer_config.yaml --sampler=config/3dball_generalize.yaml +mlagents-learn config/trainer_config.yaml --sampler=config/3dball_generalize.yaml --run-id=3D-Ball-generalization --train ``` diff --git a/docs/Training-Imitation-Learning.md b/docs/Training-Imitation-Learning.md index ec91c86da0..b1475747b9 100644 --- a/docs/Training-Imitation-Learning.md +++ b/docs/Training-Imitation-Learning.md @@ -29,7 +29,7 @@ See PreTraining + GAIL + Curiosity + RL below. The ML-Agents toolkit provides several ways to learn from demonstrations. 
-* To train using GAIL (Generative Adversarial Imitaiton Learning) you can add the +* To train using GAIL (Generative Adversarial Imitation Learning) you can add the [GAIL reward signal](Reward-Signals.md#gail-reward-signal). GAIL can be used with or without environment rewards, and works well when there are a limited number of demonstrations. @@ -38,7 +38,7 @@ The ML-Agents toolkit provides several ways to learn from demonstrations. on the PPO trainer, in addition to using a small GAIL reward signal. * To train an agent to exactly mimic demonstrations, you can use the [Behavioral Cloning](Training-Behavioral-Cloning.md) trainer. Behavioral Cloning can be - used offline and online (in-editor), and learns very quickly. However, it usually is ineffective + used with demonstrations (in-editor), and learns very quickly. However, it usually is ineffective on more complex environments without a large number of demonstrations. ### How to Choose @@ -52,8 +52,7 @@ If you want to train purely from demonstrations, GAIL is generally the preferred if you have few (<10) episodes of demonstrations. An example of this is provided for the Crawler example environment under `CrawlerStaticLearning` in `config/gail_config.yaml`. -If you have plenty of demonstrations and/or a very simple environment, Behavioral Cloning -(online and offline) can be effective and quick. However, it cannot be combined with RL. +If you have plenty of demonstrations and/or a very simple environment, Offline Behavioral Cloning can be effective and quick. However, it cannot be combined with RL. ## Recording Demonstrations diff --git a/docs/Training-ML-Agents.md b/docs/Training-ML-Agents.md index a05e0eca9f..5cbe56f793 100644 --- a/docs/Training-ML-Agents.md +++ b/docs/Training-ML-Agents.md @@ -78,11 +78,15 @@ training run if you plan to view the statistics. 
You can view these statistics using TensorBoard during or after training by running the following command: ```sh -tensorboard --logdir=summaries +tensorboard --logdir=summaries --port 6006 ``` And then opening the URL: [localhost:6006](http://localhost:6006). +**Note:** The default port TensorBoard uses is 6006. If there is an existing session +running on port 6006 a new session can be launched on an open port using the --port +option. + When training is finished, you can find the saved model in the `models` folder under the assigned run-id — in the cats example, the path to the model would be `models/cob_1/CatsOnBicycles_cob_1.nn`. @@ -110,15 +114,12 @@ environment, you can set the following command line options when invoking the oldest checkpoint is deleted when saving a new checkpoint. Defaults to 5. * `--lesson=`: Specify which lesson to start with when performing curriculum training. Defaults to 0. -* `--load`: If set, the training code loads an already trained model to - initialize the neural network before training. The learning code looks for the - model in `models//` (which is also where it saves models at the end of - training). When not set (the default), the neural network weights are randomly - initialized and an existing model is not loaded. * `--num-runs=`: Sets the number of concurrent training sessions to perform. Default is set to 1. Set to higher values when benchmarking performance and multiple training sessions is desired. Training sessions are independent, and do not improve learning performance. +* `--num-envs=`: Specifies the number of concurrent Unity environment instances to + collect experiences from when training. Defaults to 1. * `--run-id=`: Specifies an identifier for each training run. This identifier is used to name the subdirectories in which the trained model and summary statistics are saved as well as the saved model itself. 
The default id @@ -129,6 +130,18 @@ environment, you can set the following command line options when invoking training. Defaults to 50000. * `--seed=`: Specifies a number to use as a seed for the random number generator used by the training code. +* `--env-args=`: Specify arguments for the executable environment. Be aware that + the standalone build will also process these as + [Unity Command Line Arguments](https://docs.unity3d.com/Manual/CommandLineArguments.html). + You should choose different argument names if you want to create environment-specific arguments. + All arguments after this flag will be passed to the executable. For example, setting + `mlagents-learn config/trainer_config.yaml --env-args --num-orcs 42` would result in + ` --num-orcs 42` passed to the executable. +* `--base-port`: Specifies the starting port. Each concurrent Unity environment instance + will get assigned a port sequentially, starting from the `base-port`. Each instance + will use the port `(base_port + worker_id)`, where the `worker_id` is sequential IDs + given to each instance from 0 to `num_envs - 1`. Default is 5005. __Note:__ When + training using the Editor rather than an executable, the base port will be ignored. * `--slow`: Specify this option to run the Unity environment at normal, game speed. The `--slow` mode uses the **Time Scale** and **Target Frame Rate** specified in the Academy's **Inference Configuration**. By default, training @@ -137,14 +150,11 @@ environment, you can set the following command line options when invoking [Academy Properties](Learning-Environment-Design-Academy.md#academy-properties). * `--train`: Specifies whether to train model or only run in inference mode. When training, **always** use the `--train` option. -* `--num-envs=`: Specifies the number of concurrent Unity environment instances to collect - experiences from when training. Defaults to 1. -* `--base-port`: Specifies the starting port. 
Each concurrent Unity environment instance will - get assigned a port sequentially, starting from the `base-port`. Each instance will use the - port `(base_port + worker_id)`, where the `worker_id` is sequential IDs given to each instance - from 0 to `num_envs - 1`. Default is 5005. -* `--docker-target-name=
`: The Docker Volume on which to store curriculum, - executable and model files. See [Using Docker](Using-Docker.md). +* `--load`: If set, the training code loads an already trained model to + initialize the neural network before training. The learning code looks for the + model in `models//` (which is also where it saves models at the end of + training). When not set (the default), the neural network weights are randomly + initialized and an existing model is not loaded. * `--no-graphics`: Specify this option to run the Unity executable in `-batchmode` and doesn't initialize the graphics driver. Use this only if your training doesn't involve visual observations (reading from Pixels). See @@ -152,59 +162,52 @@ environment, you can set the following command line options when invoking details. * `--debug`: Specify this option to enable debug-level logging for some parts of the code. * `--multi-gpu`: Setting this flag enables the use of multiple GPU's (if available) during training. -* `--env-args=`: Specify arguments for the executable environment. Be aware that - the standalone build will also process these as - [Unity Command Line Arguments](https://docs.unity3d.com/Manual/CommandLineArguments.html). - You should choose different argument names if you want to create environment-specific arguments. - All arguments after this flag will be passed to the executable. For example, setting - `mlagents-learn config/trainer_config.yaml --env-args --num-orcs 42` would result in - ` --num-orcs 42` passed to the executable. - +* `--cpu`: Forces training using CPU only. ### Training Config File The training config files `config/trainer_config.yaml`, `config/sac_trainer_config.yaml`, -`config/gail_config.yaml`, `config/online_bc_config.yaml` and `config/offline_bc_config.yaml` -specifies the training method, the hyperparameters, and a few additional values to use when -training with PPO, SAC, GAIL (with PPO), and online and offline BC. These files are divided into sections. 
-The **default** section defines the default values for all the available -settings. You can also add new sections to override these defaults to train -specific Brains. Name each of these override sections after the GameObject -containing the Brain component that should use these settings. (This GameObject -will be a child of the Academy in your scene.) Sections for the example -environments are included in the provided config file. +`config/gail_config.yaml` and `config/offline_bc_config.yaml` specifies the training method, +the hyperparameters, and a few additional values to use when training with Proximal Policy +Optimization(PPO), Soft Actor-Critic(SAC), GAIL (Generative Adversarial Imitation Learning) +with PPO, and online and offline Behavioral Cloning(BC)/Imitation. These files are divided +into sections. The **default** section defines the default values for all the available +training with PPO, SAC, GAIL (with PPO), and offline BC. These files are divided into sections. +The **default** section defines the default values for all the available settings. You can +also add new sections to override these defaults to train specific Behaviors. Name each of these +override sections after the appropriate `Behavior Name`. Sections for the +example environments are included in the provided config file. | **Setting** | **Description** | **Applies To Trainer\*** | | :------------------- | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :----------------------- | -| batch_size | The number of experiences in each iteration of gradient descent. | PPO, SAC, BC | +| batch_size | The number of experiences in each iteration of gradient descent. | PPO, SAC, BC | | batches_per_epoch | In imitation learning, the number of batches of training examples to collect before training the model. | BC | | beta | The strength of entropy regularization. 
| PPO | -| brain\_to\_imitate | For online imitation learning, the name of the GameObject containing the Brain component to imitate. | (online)BC | | demo_path | For offline imitation learning, the file path of the recorded demonstration file | (offline)BC | -| buffer_size | The number of experiences to collect before updating the policy model. In SAC, the max size of the experience buffer. | PPO, SAC | -| buffer_init_steps | The number of experiences to collect into the buffer before updating the policy model. | SAC | +| buffer_size | The number of experiences to collect before updating the policy model. In SAC, the max size of the experience buffer. | PPO, SAC | +| buffer_init_steps | The number of experiences to collect into the buffer before updating the policy model. | SAC | | epsilon | Influences how rapidly the policy can evolve during training. | PPO | -| hidden_units | The number of units in the hidden layers of the neural network. | PPO, SAC, BC | -| init_entcoef | How much the agent should explore in the beginning of training. | SAC | +| hidden_units | The number of units in the hidden layers of the neural network. | PPO, SAC, BC | +| init_entcoef | How much the agent should explore in the beginning of training. | SAC | | lambd | The regularization parameter. | PPO | -| learning_rate | The initial learning rate for gradient descent. | PPO, SAC, BC | -| max_steps | The maximum number of simulation steps to run during a training session. | PPO, SAC, BC | -| memory_size | The size of the memory an agent must keep. Used for training with a recurrent neural network. See [Using Recurrent Neural Networks](Feature-Memory.md). | PPO, SAC, BC | -| normalize | Whether to automatically normalize observations. | PPO, SAC | +| learning_rate | The initial learning rate for gradient descent. | PPO, SAC, BC | +| max_steps | The maximum number of simulation steps to run during a training session. | PPO, SAC, BC | +| memory_size | The size of the memory an agent must keep. 
Used for training with a recurrent neural network. See [Using Recurrent Neural Networks](Feature-Memory.md). | PPO, SAC, BC |
+| normalize | Whether to automatically normalize observations. | PPO, SAC |
| num_epoch | The number of passes to make through the experience buffer when performing gradient descent optimization. | PPO |
-| num_layers | The number of hidden layers in the neural network. | PPO, SAC, BC |
-| pretraining | Use demonstrations to bootstrap the policy neural network. See [Pretraining Using Demonstrations](Training-PPO.md#optional-pretraining-using-demonstrations). | PPO, SAC |
-| reward_signals | The reward signals used to train the policy. Enable Curiosity and GAIL here. See [Reward Signals](Reward-Signals.md) for configuration options. | PPO, SAC, BC |
-| save_replay_buffer | Saves the replay buffer when exiting training, and loads it on resume. | SAC |
-| sequence_length | Defines how long the sequences of experiences must be while training. Only used for training with a recurrent neural network. See [Using Recurrent Neural Networks](Feature-Memory.md). | PPO, SAC, BC |
-| summary_freq | How often, in steps, to save training statistics. This determines the number of data points shown by TensorBoard. | PPO, SAC, BC |
-| tau | How aggressively to update the target network used for bootstrapping value estimation in SAC. | SAC |
-| time_horizon | How many steps of experience to collect per-agent before adding it to the experience buffer. | PPO, SAC, (online)BC |
-| trainer | The type of training to perform: "ppo", "sac", "offline_bc" or "online_bc". | PPO, SAC, BC |
-| train_interval | How often to update the agent. | SAC |
-| num_update | Number of mini-batches to update the agent with during each update. | SAC |
-| use_recurrent | Train using a recurrent neural network. See [Using Recurrent Neural Networks](Feature-Memory.md). | PPO, SAC, BC |
-
| PPO, SAC, BC | +| pretraining | Use demonstrations to bootstrap the policy neural network. See [Pretraining Using Demonstrations](Training-PPO.md#optional-pretraining-using-demonstrations). | PPO, SAC | +| reward_signals | The reward signals used to train the policy. Enable Curiosity and GAIL here. See [Reward Signals](Reward-Signals.md) for configuration options. | PPO, SAC, BC | +| save_replay_buffer | Saves the replay buffer when exiting training, and loads it on resume. | SAC | +| sequence_length | Defines how long the sequences of experiences must be while training. Only used for training with a recurrent neural network. See [Using Recurrent Neural Networks](Feature-Memory.md). | PPO, SAC, BC | +| summary_freq | How often, in steps, to save training statistics. This determines the number of data points shown by TensorBoard. | PPO, SAC, BC | +| tau | How aggressively to update the target network used for bootstrapping value estimation in SAC. | SAC | +| time_horizon | How many steps of experience to collect per-agent before adding it to the experience buffer. | PPO, SAC, (online)BC | +| trainer | The type of training to perform: "ppo", "sac", "offline_bc" or "online_bc". | PPO, SAC, BC | +| train_interval | How often to update the agent. | SAC | +| num_update | Number of mini-batches to update the agent with during each update. | SAC | +| use_recurrent | Train using a recurrent neural network. See [Using Recurrent Neural Networks](Feature-Memory.md). | PPO, SAC, BC | \*PPO = Proximal Policy Optimization, SAC = Soft Actor-Critic, BC = Behavioral Cloning (Imitation) diff --git a/docs/Training-PPO.md b/docs/Training-PPO.md index 5bbe577641..0a0ec6b61d 100644 --- a/docs/Training-PPO.md +++ b/docs/Training-PPO.md @@ -192,9 +192,9 @@ Typical Range: `32` - `512` `vis_encode_type` corresponds to the encoder type for encoding visual observations. 
Valid options include: * `simple` (default): a simple encoder which consists of two convolutional layers -* `nature_cnn`: CNN implementation proposed by Mnih et al.(https://www.nature.com/articles/nature14236), +* `nature_cnn`: [CNN implementation proposed by Mnih et al.](https://www.nature.com/articles/nature14236), consisting of three convolutional layers -* `resnet`: IMPALA Resnet implementation (https://arxiv.org/abs/1802.01561), +* `resnet`: [IMPALA Resnet implementation](https://arxiv.org/abs/1802.01561), consisting of three stacked layers, each with two residual blocks, making a much larger network than the other two. @@ -306,7 +306,7 @@ into the training process. ### Entropy -This corresponds to how random the decisions of a Brain are. This should +This corresponds to how random the decisions are. This should consistently decrease during training. If it decreases too soon or not at all, `beta` should be adjusted (when using discrete action space). diff --git a/docs/Training-SAC.md b/docs/Training-SAC.md index aa5d67e0ee..7a838ce516 100644 --- a/docs/Training-SAC.md +++ b/docs/Training-SAC.md @@ -197,10 +197,10 @@ Typical Range: `32` - `512` `vis_encode_type` corresponds to the encoder type for encoding visual observations. Valid options include: * `simple` (default): a simple encoder which consists of two convolutional layers -* `nature_cnn`: CNN implementation proposed by Mnih et al.(https://www.nature.com/articles/nature14236), +* `nature_cnn`: [CNN implementation proposed by Mnih et al.](https://www.nature.com/articles/nature14236), consisting of three convolutional layers -* `resnet`: IMPALA Resnet implementation (https://arxiv.org/abs/1802.01561), -consisting of three stacked layers, each with two risidual blocks, making a +* `resnet`: [IMPALA Resnet implementation](https://arxiv.org/abs/1802.01561), +consisting of three stacked layers, each with two residual blocks, making a much larger network than the other two. 
Options: `simple`, `nature_cnn`, `resnet` @@ -313,7 +313,7 @@ long to decrease, `init_entcoef` should be adjusted. ### Entropy -This corresponds to how random the decisions of a Brain are. This should +This corresponds to how random the decisions are. This should initially increase during training, reach a peak, and should decline along with the Entropy Coefficient. This is because in the beginning, the agent is incentivized to be more random for exploration due to a high entropy coefficient. diff --git a/docs/Training-Using-Concurrent-Unity-Instances.md b/docs/Training-Using-Concurrent-Unity-Instances.md index 14f5b18e76..e4d7acdabf 100644 --- a/docs/Training-Using-Concurrent-Unity-Instances.md +++ b/docs/Training-Using-Concurrent-Unity-Instances.md @@ -1,6 +1,6 @@ # Training Using Concurrent Unity Instances -As part of release v0.8, we enabled developers to run concurrent, parallel instances of the Unity executable during training. For certain scenarios, this should speed up the training. +As part of release v0.8, we enabled developers to run concurrent, parallel instances of the Unity executable during training. For certain scenarios, this should speed up the training. ## How to Run Concurrent Unity Instances During Training diff --git a/docs/Training-on-Amazon-Web-Service.md b/docs/Training-on-Amazon-Web-Service.md index dbba1d9199..4e6a210869 100644 --- a/docs/Training-on-Amazon-Web-Service.md +++ b/docs/Training-on-Amazon-Web-Service.md @@ -1,5 +1,8 @@ # Training on Amazon Web Service +Note: We no longer use this guide ourselves and so it may not work correctly. We've +decided to keep it up just in case it is helpful to you. + This page contains instructions for setting up an EC2 instance on Amazon Web Service for training ML-Agents environments. 
@@ -252,7 +255,7 @@ There is no data folder ### Unity Environment not responding -If you didn't setup X Server or hasn't launched it properly, or you didn't made your environment with external brain, or your environment somehow crashes, or you haven't `chmod +x` your Unity Environment, all of these will cause connection between Unity and Python to fail. Then you will see something like this: +If you didn't setup X Server or hasn't launched it properly, or your environment somehow crashes, or you haven't `chmod +x` your Unity Environment, all of these will cause connection between Unity and Python to fail. Then you will see something like this: ```console Logging to /home/ubuntu/.config/unity3d//Player.log @@ -265,7 +268,6 @@ Traceback (most recent call last): File "/home/ubuntu/ml-agents/ml-agents/mlagents/envs/rpc_communicator.py", line 60, in initialize mlagents.envs.exception.UnityTimeOutException: The Unity environment took too long to respond. Make sure that : The environment does not need user interaction to launch - The Academy and the External Brain(s) are attached to objects in the Scene The environment and the Python interface have compatible versions. ``` diff --git a/docs/Training-on-Microsoft-Azure-Custom-Instance.md b/docs/Training-on-Microsoft-Azure-Custom-Instance.md index 55df18fb28..18996c2d5e 100644 --- a/docs/Training-on-Microsoft-Azure-Custom-Instance.md +++ b/docs/Training-on-Microsoft-Azure-Custom-Instance.md @@ -40,7 +40,7 @@ This page contains instructions for setting up a custom Virtual Machine on Micro 6. Navigate to [http://developer.nvidia.com](http://developer.nvidia.com) and create an account and verify it. -7. Download (to your own computer) cuDNN from [this url](https://developer.nvidia.com/compute/machine-learning/cudnn/secure/v6/prod/8.0_20170307/Ubuntu16_04_x64/libcudnn6_6.0.20-1+cuda8.0_amd64-deb). +7. 
Download (to your own computer) cuDNN from [this url](https://developer.nvidia.com/compute/machine-learning/cudnn/secure/v6/prod/8.0_20170307/Ubuntu16_04_x64/libcudnn6_6.0.20-1+cuda8.0_amd64-deb). 8. Copy the deb package to your VM: diff --git a/docs/Training-on-Microsoft-Azure.md b/docs/Training-on-Microsoft-Azure.md index 521961bb5c..0aca651e8b 100644 --- a/docs/Training-on-Microsoft-Azure.md +++ b/docs/Training-on-Microsoft-Azure.md @@ -1,5 +1,8 @@ # Training on Microsoft Azure (works with ML-Agents toolkit v0.3) +Note: We no longer use this guide ourselves and so it may not work correctly. We've +decided to keep it up just in case it is helpful to you. + This page contains instructions for setting up training on Microsoft Azure through either [Azure Container Instances](https://azure.microsoft.com/services/container-instances/) @@ -107,9 +110,5 @@ training](Using-Tensorboard.md). [Azure Container Instances](https://azure.microsoft.com/services/container-instances/) allow you to spin up a container, on demand, that will run your training and then be shut down. This ensures you aren't leaving a billable VM running when -it isn't needed. You can read more about -[The ML-Agents toolkit support for Docker containers here](Using-Docker.md). -Using ACI enables you to offload training of your models without needing to -install Python and TensorFlow on your own computer. You can find instructions, -including a pre-deployed image in DockerHub for you to use, available -[here](https://github.com/druttka/unity-ml-on-azure). +it isn't needed. Using ACI enables you to offload training of your models without needing to +install Python and TensorFlow on your own computer. diff --git a/docs/Unity-Inference-Engine.md b/docs/Unity-Inference-Engine.md index 5a55ca85e8..fffedf2bf3 100644 --- a/docs/Unity-Inference-Engine.md +++ b/docs/Unity-Inference-Engine.md @@ -2,18 +2,18 @@ The ML-Agents toolkit allows you to use pre-trained neural network models inside your Unity games. 
This support is possible thanks to the Unity Inference -Engine. The Unity Inference Engine is using -[compute shaders](https://docs.unity3d.com/Manual/class-ComputeShader.html) -to run the neural network within Unity. +Engine. The Unity Inference Engine is using +[compute shaders](https://docs.unity3d.com/Manual/class-ComputeShader.html) +to run the neural network within Unity. ## Supported devices -Scripting Backends : The Unity Inference Engine is generally faster with +Scripting Backends : The Unity Inference Engine is generally faster with __IL2CPP__ than with __Mono__ for Standalone builds. -In the Editor, It is not possible to use the Unity Inference Engine with -GPU device selected when Editor Graphics Emulation is set to __OpenGL(ES) -3.0 or 2.0 emulation__. Also there might be non-fatal build time errors -when target platform includes Graphics API that does not support +In the Editor, It is not possible to use the Unity Inference Engine with +GPU device selected when Editor Graphics Emulation is set to __OpenGL(ES) +3.0 or 2.0 emulation__. Also there might be non-fatal build time errors +when target platform includes Graphics API that does not support __Unity Compute Shaders__. The Unity Inference Engine supposedly works on any Unity supported platform but we only tested for the following platforms : @@ -26,12 +26,10 @@ but we only tested for the following platforms : ## Using the Unity Inference Engine -When using a **Learning Brain**, drag the `.nn` file into the **Model** field -in the Inspector. -Uncheck the `Control` checkbox for the corresponding **Brain** in the -**BroadcastHub** of the Academy. +When using a model, drag the `.nn` file into the **Model** field +in the Inspector of the Agent. Select the **Inference Device** : CPU or GPU you want to use for Inference. **Note:** For most of the models generated with the ML-Agents toolkit, CPU will be faster than GPU. 
-You should use the GPU only if you use the +You should use the GPU only if you use the ResNet visual encoder or have a large number of agents with visual observations. diff --git a/docs/Using-Docker.md b/docs/Using-Docker.md deleted file mode 100644 index a7a14ef7a8..0000000000 --- a/docs/Using-Docker.md +++ /dev/null @@ -1,166 +0,0 @@ -# Using Docker For ML-Agents - -We currently offer a solution for Windows and Mac users who would like to do -training or inference using Docker. This option may be appealing to those who -would like to avoid installing Python and TensorFlow themselves. The current -setup forces both TensorFlow and Unity to _only_ rely on the CPU for -computations. Consequently, our Docker simulation does not use a GPU and uses -[`Xvfb`](https://en.wikipedia.org/wiki/Xvfb) to do visual rendering. `Xvfb` is a -utility that enables `ML-Agents` (or any other application) to do rendering -virtually i.e. it does not assume that the machine running `ML-Agents` has a GPU -or a display attached to it. This means that rich environments which involve -agents using camera-based visual observations might be slower. - -## Requirements - -- Unity _Linux Build Support_ Component -- [Docker](https://www.docker.com) - -## Setup - -- [Download](https://unity3d.com/get-unity/download) the Unity Installer and add - the _Linux Build Support_ Component - -- [Download](https://www.docker.com/community-edition#/download) and install - Docker if you don't have it setup on your machine. - -- Since Docker runs a container in an environment that is isolated from the host - machine, a mounted directory in your host machine is used to share data, e.g. - the trainer configuration file, Unity executable, curriculum files and - TensorFlow graph. For convenience, we created an empty `unity-volume` - directory at the root of the repository for this purpose, but feel free to use - any other directory. 
The remainder of this guide assumes that the - `unity-volume` directory is the one used. - -## Usage - -Using Docker for ML-Agents involves three steps: building the Unity environment -with specific flags, building a Docker container and, finally, running the -container. If you are not familiar with building a Unity environment for -ML-Agents, please read through our [Getting Started with the 3D Balance Ball -Example](Getting-Started-with-Balance-Ball.md) guide first. - -### Build the Environment (Optional) - -_If you want to used the Editor to perform training, you can skip this step._ - -Since Docker typically runs a container sharing a (linux) kernel with the host -machine, the Unity environment **has** to be built for the **linux platform**. -When building a Unity environment, please select the following options from the -the Build Settings window: - -- Set the _Target Platform_ to `Linux` -- Set the _Architecture_ to `x86_64` -- If the environment does not contain visual observations, you can select the - `headless` option here. - -Then click `Build`, pick an environment name (e.g. `3DBall`) and set the output -directory to `unity-volume`. After building, ensure that the file -`.x86_64` and subdirectory `_Data/` are -created under `unity-volume`. - -![Build Settings For Docker](images/docker_build_settings.png) - -### Build the Docker Container - -First, make sure the Docker engine is running on your machine. Then build the -Docker container by calling the following command at the top-level of the -repository: - -```sh -docker build -t . -``` - -Replace `` with a name for the Docker image, e.g. -`balance.ball.v0.1`. 
- -### Run the Docker Container - -Run the Docker container by calling the following command at the top-level of -the repository: - -```sh -docker run -it --name \ - --mount type=bind,source="$(pwd)"/unity-volume,target=/unity-volume \ - -p 5005:5005 \ - -p 6006:6006 \ - :latest \ - --docker-target-name=unity-volume \ - \ - --env= \ - --train \ - --run-id= -``` - -Notes on argument values: - -- `` is used to identify the container (in case you want to - interrupt and terminate it). This is optional and Docker will generate a - random name if this is not set. _Note that this must be unique for every run - of a Docker image._ -- `` references the image name used when building the container. -- `` __(Optional)__: If you are training with a linux - executable, this is the name of the executable. If you are training in the - Editor, do not pass a `` argument and press the - :arrow_forward: button in Unity when the message _"Start training by pressing - the Play button in the Unity Editor"_ is displayed on the screen. -- `source`: Reference to the path in your host OS where you will store the Unity - executable. -- `target`: Tells Docker to mount the `source` path as a disk with this name. -- `docker-target-name`: Tells the ML-Agents Python package what the name of the - disk where it can read the Unity executable and store the graph. **This should - therefore be identical to `target`.** -- `trainer-config-file`, `train`, `run-id`: ML-Agents arguments passed to - `mlagents-learn`. `trainer-config-file` is the filename of the trainer config - file, `train` trains the algorithm, and `run-id` is used to tag each - experiment with a unique identifier. We recommend placing the trainer-config - file inside `unity-volume` so that the container has access to the file. 
- -To train with a `3DBall` environment executable, the command would be: - -```sh -docker run -it --name 3DBallContainer.first.trial \ - --mount type=bind,source="$(pwd)"/unity-volume,target=/unity-volume \ - -p 5005:5005 \ - -p 6006:6006 \ - balance.ball.v0.1:latest 3DBall \ - --docker-target-name=unity-volume \ - trainer_config.yaml \ - --env=3DBall \ - --train \ - --run-id=3dball_first_trial -``` - -For more detail on Docker mounts, check out -[these](https://docs.docker.com/storage/bind-mounts/) docs from Docker. - -**NOTE** If you are training using docker for environments that use visual observations, you may need to increase the default memory that Docker allocates for the container. For example, see [here](https://docs.docker.com/docker-for-mac/#advanced) for instructions for Docker for Mac. - -### Running Tensorboard - -You can run Tensorboard to monitor your training instance on http://localhost:6006: - -```sh -docker exec -it tensorboard --logdir=/unity-volume/summaries --host=0.0.0.0 -``` - -With our previous 3DBall example, this command would look like this: -```sh -docker exec -it 3DBallContainer.first.trial tensorboard --logdir=/unity-volume/summaries --host=0.0.0.0 -``` - -For more details on Tensorboard, check out the documentation about [Using Tensorboard](Using-Tensorboard.md). - -### Stopping Container and Saving State - -If you are satisfied with the training progress, you can stop the Docker -container while saving state by either using `Ctrl+C` or `⌘+C` (Mac) or by using -the following command: - -```sh -docker kill --signal=SIGINT -``` - -`` is the name of the container specified in the earlier `docker -run` command. If you didn't specify one, you can find the randomly generated -identifier by running `docker container ls`. diff --git a/docs/Using-Tensorboard.md b/docs/Using-Tensorboard.md index aef5e48d91..b150a5bbe5 100644 --- a/docs/Using-Tensorboard.md +++ b/docs/Using-Tensorboard.md @@ -15,11 +15,15 @@ start TensorBoard: 3. 
From the command line run : ```sh - tensorboard --logdir=summaries + tensorboard --logdir=summaries --port=6006 ``` 4. Open a browser window and navigate to [localhost:6006](http://localhost:6006). +**Note:** The default port TensorBoard uses is 6006. If there is an existing session +running on port 6006 a new session can be launched on an open port using the --port +option. + **Note:** If you don't assign a `run-id` identifier, `mlagents-learn` uses the default string, "ppo". All the statistics will be saved to the same sub-folder and displayed as one session in TensorBoard. After a few runs, the displays can @@ -47,7 +51,7 @@ The ML-Agents training program saves the following statistics: * `Environment/Cumulative Reward` - The mean cumulative episode reward over all agents. Should increase during a successful training session. - + * `Environment/Episode Length` - The mean length of each episode in the environment for all agents. ### Policy Statistics @@ -58,7 +62,7 @@ The ML-Agents training program saves the following statistics: * `Policy/Learning Rate` (PPO; BC) - How large a step the training algorithm takes as it searches for the optimal policy. Should decrease over time. - + * `Policy/Value Estimate` (PPO) - The mean value estimate for all states visited by the agent. Should increase during a successful training session. * `Policy/Curiosity Reward` (PPO+Curiosity) - This corresponds to the mean cumulative intrinsic reward generated per-episode. @@ -81,5 +85,5 @@ The ML-Agents training program saves the following statistics: * `Losses/Inverse Loss` (PPO+Curiosity) - The mean magnitude of the forward model loss function. Corresponds to how well the model is able to predict the action taken between two observations. - + * `Losses/Cloning Loss` (BC) - The mean magnitude of the behavioral cloning loss. Corresponds to how well the model imitates the demonstration data. 
diff --git a/docs/Using-Virtual-Environment.md b/docs/Using-Virtual-Environment.md new file mode 100644 index 0000000000..1bcb46fa35 --- /dev/null +++ b/docs/Using-Virtual-Environment.md @@ -0,0 +1,53 @@ +# Using Virtual Environment + +## What is a Virtual Environment? +A Virtual Environment is a self contained directory tree that contains a Python installation +for a particular version of Python, plus a number of additional packages. To learn more about +Virtual Environments see [here](https://docs.python.org/3/library/venv.html) + +## Why should I use a Virtual Environment? +A Virtual Environment keeps all dependencies for the Python project separate from dependencies +of other projects. This has a few advantages: +1. It makes dependency management for the project easy. +1. It enables using and testing of different library versions by quickly +spinning up a new environment and verifying the compatibility of the code with the +different version. + +Requirement - Python 3.6 must be installed on the machine you would like +to run ML-Agents on (either local laptop/desktop or remote server). Python 3.6 can be +installed from [here](https://www.python.org/downloads/). + + +## Installing Pip (Required) + +1. Download the `get-pip.py` file using the command `curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py` +1. Run the following `python3 get-pip.py` +1. Check pip version using `pip3 -V` + +Note (for Ubuntu users): If the `ModuleNotFoundError: No module named 'distutils.util'` error is encountered, then +python3-distutils needs to be installed. Install python3-distutils using `sudo apt-get install python3-distutils` + +## Mac OS X Setup + +1. Create a folder where the virtual environments will reside `$ mkdir ~/python-envs` +1. To create a new environment named `sample-env` execute `$ python3 -m venv ~/python-envs/sample-env` +1. To activate the environment execute `$ source ~/python-envs/sample-env/bin/activate` +1. 
Verify pip version is the same as in the __Installing Pip__ section. In case it is not the latest, upgrade to +the latest pip version using `pip3 install --upgrade pip` +1. Install ML-Agents package using `$ pip3 install mlagents` +1. To deactivate the environment execute `$ deactivate` + +## Ubuntu Setup + +1. Install the python3-venv package using `$ sudo apt-get install python3-venv` +1. Follow the steps in the Mac OS X installation. + +## Windows Setup + +1. Create a folder where the virtual environments will reside `$ md python-envs` +1. To create a new environment named `sample-env` execute `$ python3 -m venv python-envs\sample-env` +1. To activate the environment execute `$ python-envs\sample-env\Scripts\activate` +1. Verify pip version is the same as in the __Installing Pip__ section. In case it is not the latest, upgrade to +the latest pip version using `pip3 install --upgrade pip` +1. Install ML-Agents package using `$ pip3 install mlagents` +1. To deactivate the environment execute `$ deactivate` diff --git a/docs/images/3dball_learning_brain.png b/docs/images/3dball_learning_brain.png index 1f4a4440ed..eca51b789a 100644 Binary files a/docs/images/3dball_learning_brain.png and b/docs/images/3dball_learning_brain.png differ diff --git a/docs/images/academy.png b/docs/images/academy.png index 62f3e5f8e5..f0dc135419 100644 Binary files a/docs/images/academy.png and b/docs/images/academy.png differ diff --git a/docs/images/agent.png b/docs/images/agent.png deleted file mode 100644 index 1918afe54d..0000000000 Binary files a/docs/images/agent.png and /dev/null differ diff --git a/docs/images/brain.png b/docs/images/brain.png deleted file mode 100644 index b7e45cdfb7..0000000000 Binary files a/docs/images/brain.png and /dev/null differ diff --git a/docs/images/broadcast.png b/docs/images/broadcast.png deleted file mode 100644 index 5428110aef..0000000000 Binary files a/docs/images/broadcast.png and /dev/null differ diff --git 
a/docs/images/mlagents-3DBallHierarchyOld.png b/docs/images/mlagents-3DBallHierarchyOld.png deleted file mode 100644 index a853848a48..0000000000 Binary files a/docs/images/mlagents-3DBallHierarchyOld.png and /dev/null differ diff --git a/docs/images/mlagents-NewTutAcademy.png b/docs/images/mlagents-NewTutAcademy.png index d3bf3289a8..1af2dfb747 100644 Binary files a/docs/images/mlagents-NewTutAcademy.png and b/docs/images/mlagents-NewTutAcademy.png differ diff --git a/docs/images/mlagents-NewTutAssignBrain.png b/docs/images/mlagents-NewTutAssignBrain.png deleted file mode 100644 index b657046c88..0000000000 Binary files a/docs/images/mlagents-NewTutAssignBrain.png and /dev/null differ diff --git a/docs/images/mlagents-NewTutBrain.png b/docs/images/mlagents-NewTutBrain.png deleted file mode 100644 index 23a5093d81..0000000000 Binary files a/docs/images/mlagents-NewTutBrain.png and /dev/null differ diff --git a/docs/images/mlagents-SetBrainToTrain.png b/docs/images/mlagents-SetBrainToTrain.png deleted file mode 100644 index 9fa8347e3d..0000000000 Binary files a/docs/images/mlagents-SetBrainToTrain.png and /dev/null differ diff --git a/docs/images/player_brain.png b/docs/images/player_brain.png deleted file mode 100644 index 043f0d9c1b..0000000000 Binary files a/docs/images/player_brain.png and /dev/null differ diff --git a/docs/images/visual-observation-combination.png b/docs/images/visual-observation-combination.png deleted file mode 100644 index a40b37752c..0000000000 Binary files a/docs/images/visual-observation-combination.png and /dev/null differ diff --git a/docs/images/visual-observation-rendertexture.png b/docs/images/visual-observation-rendertexture.png index d2f8c7f662..b609f14653 100644 Binary files a/docs/images/visual-observation-rendertexture.png and b/docs/images/visual-observation-rendertexture.png differ diff --git a/docs/images/visual-observation.png b/docs/images/visual-observation.png index bfc3144049..77e33962a9 100644 Binary files 
a/docs/images/visual-observation.png and b/docs/images/visual-observation.png differ diff --git a/docs/localized/KR/README.md b/docs/localized/KR/README.md index 5a3709847f..1b27a4a04e 100644 --- a/docs/localized/KR/README.md +++ b/docs/localized/KR/README.md @@ -7,11 +7,11 @@ [![license badge](https://img.shields.io/badge/license-Apache--2.0-green.svg)](LICENSE) **Unity Machine Learning Agents Toolkit** (ML-Agents) 은 지능형 에이전트를 학습시키기 위한 -환경을 제공하여 게임 또는 시뮬레이션을 만들 수 있게 해주는 오픈소스 유니티 플러그인 입니다. 사용하기 쉬운 +환경을 제공하여 게임 또는 시뮬레이션을 만들 수 있게 해주는 오픈소스 유니티 플러그인 입니다. 사용하기 쉬운 파이썬 API를 통해 강화학습, 모방학습, 신경진화 또는 다른 기계학습 방법론을 사용하여 에이전트들을 학습시킬 수 있습니다. 우리는 또한 게임 개발자와 개발에 대해 취미를 가지신 분들이 2D, 3D 그리고 VR/AR 게임들에 사용할 지능형 에이전트를 쉽게 훈련시킬 수 있도록하는 최신 알고리즘 구현체를 ([텐서플로우]([https://www.tensorflow.org/](https://www.tensorflow.org/)) 기반)을 제공합니다. 학습된 에이전트들은 -NPC의 행동 제어(다중 에이전트, 적대적 에이전트 등), 게임 빌드 테스트 자동화, 그리고 출시 전 게임 설계 검증 등을 포함한 다양한 목적을 위해 사용될 수 있습니다. +NPC의 행동 제어(다중 에이전트, 적대적 에이전트 등), 게임 빌드 테스트 자동화, 그리고 출시 전 게임 설계 검증 등을 포함한 다양한 목적을 위해 사용될 수 있습니다. ML-Agents toolkit은 유니티의 풍부한 환경에서 인공지능 에이전트 개발을 위한 중심 플랫폼을 제공함으로써 더욱 광범위한 연구와 게임 개발이 진행되도록 하며 이에 따라 게임 개발자들과 AI 연구원들 모두에게 도움을 줍니다. ## 특징 @@ -53,13 +53,13 @@ ML-Agents toolkit은 유니티의 풍부한 환경에서 인공지능 에이전 ## 커뮤니티 그리고 피드백 -ML-Agents toolkit은 오픈소스 프로젝트이며 컨트리뷰션을 환영합니다. 만약 컨트리뷰션을 원하시는 경우 +ML-Agents toolkit은 오픈소스 프로젝트이며 컨트리뷰션을 환영합니다. 만약 컨트리뷰션을 원하시는 경우 [컨트리뷰션 가이드라인](CONTRIBUTING.md)과 [행동 규칙](CODE_OF_CONDUCT.md)을 검토해주십시오. 만약 ML-Agents toolkit을 사용하며 문제가 생긴다면, 가능한 많은 세부 사항을 포함하여 [이슈 제출](https://github.com/Unity-Technologies/ml-agents/issues)을 해주십시오. 여러분의 의견은 저희에게 매우 중요합니다. Unity ML-Agents Toolkit에 관련된 여러분의 의견을 통해서 저희는 계속해서 -발전하고 성장할 수 있습니다. 단 몇 분만 사용하여 [저희에게 알려주세요](https://github.com/Unity-Technologies/ml-agents/issues/1454). +발전하고 성장할 수 있습니다. 단 몇 분만 사용하여 [저희에게 알려주세요](https://github.com/Unity-Technologies/ml-agents/issues/1454). 다른 의견과 피드백은 ML-Agents 팀과 직접 연락부탁드립니다. 
(ml-agents@unity3d.com) diff --git a/docs/localized/KR/docs/Installation-Windows.md b/docs/localized/KR/docs/Installation-Windows.md index fde671f589..8d46eaec17 100644 --- a/docs/localized/KR/docs/Installation-Windows.md +++ b/docs/localized/KR/docs/Installation-Windows.md @@ -1,18 +1,18 @@ # Windows ڸ ML-Agents Toolkit ġ ML-Agents toolkit Windows 10 մϴ. ٸ Windows ε ML-Agents toolkit - ʾҽϴ. , ML-Agents toolkit Windows VM(Bootcamp Ǵ ó + ʾҽϴ. , ML-Agents toolkit Windows VM(Bootcamp Ǵ ó ȯ ) ʾҽϴ . ML-Agents toolkit ϱ , Ʒ Ȱ ó Python 䱸Ǵ Python Ű ġؾ մϴ. - ̵ GPU н(ڸ ) ٷϴ. + ̵ GPU н(ڸ ) ٷϴ. , ML-Agents toolkit GPU н ʿ Ǵ Ư ׿ ʿ ֽϴ. ## ܰ 1: Anaconda Python ġ Windows Anaconda [ٿε](https://www.anaconda.com/download/#windows)ϰ ġϽʽÿ. Anaconda ν, ٸ Python и ȯ濡 ֽϴ. -Python 2 ̻ ʱ Python 3.5 Ǵ 3.6 ʿմϴ. ̵忡 츮 +Python 2 ̻ ʱ Python 3.5 Ǵ 3.6 ʿմϴ. ̵忡 츮 Python 3.6 Anaconda 5.1 Դϴ. ([64-bit](https://repo.continuum.io/archive/Anaconda3-5.1.0-Windows-x86_64.exe) Ǵ [32-bit](https://repo.continuum.io/archive/Anaconda3-5.1.0-Windows-x86.exe) @@ -37,7 +37,7 @@ Windows Ž "conda is not recognized as internal or external command" Դϴ. ̸ ذϱ Ȯ ȯ ʿմϴ. -Ž â `ȯ ` Ÿ Ͽ ( Ű ų Ʒ ư ֽϴ). +Ž â `ȯ ` Ÿ Ͽ ( Ű ų Ʒ ư ֽϴ). __ý ȯ __ ɼ ҷɴϴ.

@@ -94,7 +94,7 @@ pip install tensorflow==1.7.1 ## ܰ 3: ʼ ̽ Ű ġ -ML-Agents toolkit ̽ Ű Դϴ. `pip` Ͽ ̽ Ӽ ġϽʽÿ. +ML-Agents toolkit ̽ Ű Դϴ. `pip` Ͽ ̽ Ӽ ġϽʽÿ. ML-Agents Toolkit Ұ ǻͿ Ǿ ʾҴٸ Ͻʽÿ. Git ([ٿε](https://git-scm.com/download/win))ϰ Ų ɾ Anaconda Ʈâ ԷϿ ֽϴ. _( Ʈ â ִٸ `activate ml-agents` ŸϿ @@ -110,7 +110,7 @@ git clone https://github.com/Unity-Technologies/ml-agents.git `ml-agents` 丮 Ƽ ȯ ԰ ϴ ȭн Ʈ̳ ̽ Ű ԵǾ ֽϴ. -`ml-agents-envs` 丮 `ml-agents` Ű ӵǴ Ƽ ̽ ̽ API ԵǾ ֽϴ. +`ml-agents-envs` 丮 `ml-agents` Ű ӵǴ Ƽ ̽ ̽ API ԵǾ ֽϴ. `gym-unity` 丮 OpenAI Gym ̽ Ű ԵǾ ֽϴ. @@ -133,18 +133,18 @@ pip install mlagents --no-cache-dir `--no-cache-dir` pip ij Ȱȭ Ѵٴ Դϴ. -### ġ +### ġ `ml-agents` Ǵ `ml-agents-envs` ϰ ʹٸ, PyPi ƴ ҷ Ű ġؾ մϴ. -̸ , `ml-agents` `ml-agents-envs` ġؾ մϴ. - - `C:\Downloads` ġ ֽϴ. ϰų ٿε +̸ , `ml-agents` `ml-agents-envs` ġؾ մϴ. + + `C:\Downloads` ġ ֽϴ. ϰų ٿε Anaconda Ʈ ml-agents 丮 ml-agents 丮 Ͻʽÿ: ```console cd C:\Downloads\ml-agents ``` - + 丮 Ͻʽÿ: ```console @@ -155,11 +155,11 @@ cd ml-agents pip install -e . ``` -`-e` ÷׸ Ͽ pip ϸ ̽ ְ `mlagents-learn` ݿ˴ϴ. +`-e` ÷׸ Ͽ pip ϸ ̽ ְ `mlagents-learn` ݿ˴ϴ. `mlagents` Ű `mlagents_envs` ̰, ٸ ġϸ PyPi `mlagents_envs` ġ ֱ - Ű ġϴ ߿մϴ. + Ű ġϴ ߿մϴ. -## (ɼ) Step 4: ML-Agents Toolkit GPU н +## (ɼ) Step 4: ML-Agents Toolkit GPU н ML-Agents toolkit GPU ʿ н ߿ PPO ˰ ӵ ũ մϴ( Ŀ GPU ֽϴ). ̵ GPU н ϰ ڸ ̵ Դϴ. GPU CUDA ȣȯǴ Ȯؾ մϴ. @@ -170,7 +170,7 @@ ML-Agents toolkit ### Nvidia CUDA toolkit ġ Nvidia ī̺꿡 CUDA Ŷ(toolkit) 9.0 [ٿε](https://developer.nvidia.com/cuda-toolkit-archive)ϰ ġϽʽÿ. -ML-Agents toolkit Ű CUDA Ŷ GPU ̺귯, +ML-Agents toolkit Ű CUDA Ŷ GPU ̺귯, -ȭ , C/C++(־ Ʃ 2017) Ϸ, Ÿ ̺귯 մϴ. ̵忡 [9.0.176](https://developer.nvidia.com/compute/cuda/9.0/Prod/network_installers/cuda_9.0.176_win10_network-exe)) մϴ. @@ -181,7 +181,7 @@ ML-Agents toolkit ### Nvidia cuDNN ̺귯 ġ -Nvidia cuDNN ̺귯 [ٿε](https://developer.nvidia.com/cudnn)ϰ ġϽʽÿ. +Nvidia cuDNN ̺귯 [ٿε](https://developer.nvidia.com/cudnn)ϰ ġϽʽÿ. cuDNN Ű ⺻ Ǵ GPU ̺귯. ٿε Nvidia Developer Program ؾ Դϴ().

@@ -192,11 +192,11 @@ cuDNN ϰ cuDNN [ٿε ](https://developer.nvidia.com/cudnn) ưʽÿ. ª 翡 ؾ ֽϴ. When you get to the list -cuDNN Ʈ __ܰ 1 ġ CUDA Ŷ ´ ٿεϰ ִ ȮϽʽÿ.__ ̵忡, +cuDNN Ʈ __ܰ 1 ġ CUDA Ŷ ´ ٿεϰ ִ ȮϽʽÿ.__ ̵忡, CUDA Ŷ 9.0 7.0.5 մϴ ([ٿε ũ](https://developer.nvidia.com/compute/machine-learning/cudnn/secure/v7.0.5/prod/9.0_20171129/cudnn-9.0-windows10-x64-v7)). -cuDNN ٿε Ŀ, CUDA Ŷ 丮ȿ ( )ؾ մϴ. +cuDNN ٿε Ŀ, CUDA Ŷ 丮ȿ ( )ؾ մϴ. cuDNN zip ȿ `bin`, `include`, ׸ `lib` ֽϴ.

@@ -205,7 +205,7 @@ cuDNN zip width="500" border="10" />

- CUDA Ŷ 丮ȿ Ͻʽÿ. + CUDA Ŷ 丮ȿ Ͻʽÿ. CUDA Ŷ 丮 `C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v9.0` ġ ֽϴ.

@@ -218,7 +218,7 @@ CUDA 1 ȯ 2 ߰ؾ մϴ. -ȯ ϱ , Ž â `ȯ ` Ÿ Ͽ ( Ű ų Ʒ ư ֽϴ). +ȯ ϱ , Ž â `ȯ ` Ÿ Ͽ ( Ű ų Ʒ ư ֽϴ). __ý ȯ __ ɼ ҷɴϴ.

diff --git a/docs/localized/KR/docs/Installation.md b/docs/localized/KR/docs/Installation.md index 44b50d3f36..80354847bf 100644 --- a/docs/localized/KR/docs/Installation.md +++ b/docs/localized/KR/docs/Installation.md @@ -16,7 +16,7 @@ Clone(복제)하고 추가종속성을 가지는 Python(파이썬)을 설치해

## Windows 사용자 -Windows에서 환경을 설정하기 위해, [세부 사항](Installation-Windows.md)에 설정 방법에 대해 작성하였습니다. +Windows에서 환경을 설정하기 위해, [세부 사항](Installation-Windows.md)에 설정 방법에 대해 작성하였습니다. Mac과 Linux는 다음 가이드를 확인해주십시오. ## Mac 또는 Unix 사용자 @@ -58,14 +58,14 @@ Python 3.6이 만약 설치되어 있지 않다면, [다운로드](https://www.p pip3 install mlagents ``` -이 명령어를 통해 PyPi로 부터(복제된 저장소가 아닌) `ml-agents`가 설치될 것입니다. +이 명령어를 통해 PyPi로 부터(복제된 저장소가 아닌) `ml-agents`가 설치될 것입니다. 만약 성공적으로 설치를 완료 했다면, `mlagents-learn --help` 명령어를 실행할 수 있을 것입니다. -명령어를 실행하면 유니티 로고와 `mlagents-learn`에서 사용할 수 있는 명령어 라인 매개변수들을 볼 수 있습니다. +명령어를 실행하면 유니티 로고와 `mlagents-learn`에서 사용할 수 있는 명령어 라인 매개변수들을 볼 수 있습니다. **주의:** - 현재 Python 3.7 또는 Python 3.5을 지원하지 않습니다. -- 만약 Anaconda를 사용하고 TensorFlow에 문제가 있다면, 다음 +- 만약 Anaconda를 사용하고 TensorFlow에 문제가 있다면, 다음 [링크](https://www.tensorflow.org/install/pip)에서 Anaconda 환경에서 어떻게 TensorFlow를 설치하는지 확인하십시오. ### 개발을 위한 설치방법 @@ -82,7 +82,7 @@ pip3 install -e ./ `-e` 플래그를 사용하여 pip를 실행 하면 파이썬 파일을 직접 변경할 수 있고 `mlagents-learn`를 실행할 때 반영됩니다. `mlagents` 패키지가 `mlagents_envs`에 의존적이고, 다른 순서로 설치하면 PyPi로 부터 `mlagents_envs`를 -설치할 수 있기 때문에 이 순서대로 패키지를 설치하는 것은 중요합니다. +설치할 수 있기 때문에 이 순서대로 패키지를 설치하는 것은 중요합니다. ## 도커 기반 설치 @@ -90,7 +90,7 @@ pip3 install -e ./ ## 다음 단계 -[기초 가이드](Basic-Guide.md) 페이지에는 유니티 내에서 ML-Agents toolkit의 설정 및 학습된 모델 실행, +[기초 가이드](Basic-Guide.md) 페이지에는 유니티 내에서 ML-Agents toolkit의 설정 및 학습된 모델 실행, 환경 구축, 학습 방법에 대한 여러 짧은 튜토리얼을 포함하고 있습니다. ## 도움말 diff --git a/docs/localized/KR/docs/Training-Imitation-Learning.md b/docs/localized/KR/docs/Training-Imitation-Learning.md index dc4a24b9d1..9513c08f53 100644 --- a/docs/localized/KR/docs/Training-Imitation-Learning.md +++ b/docs/localized/KR/docs/Training-Imitation-Learning.md @@ -6,7 +6,7 @@ 유니티 에디터를 이용하여 에이전트의 플레이를 기록하고 에셋으로 저장하는 것이 가능합니다. 이런 플레이 데이터에는 기록을 진행하는 동안의 관측, 행동 그리고 보상 정보가 포함됩니다. 이것들은 데이터를 통해 관리가 가능하며 Behavioral Cloning과 같은 오프라인 학습에 사용될 수 있습니다. (아래 내용 참고) -에이전트의 플레이 데이터를 기록하기 위해서는 씬(Scene)에서 `Agent` 컴포넌트를 포함하고 있는 GameObject에 `Demonstration Recorder` 컴포넌트를 추가해주어야 합니다. 
일단 추가되고나면 에이전트로부터 플레이 데이터를 기록할 수 있게 됩니다. +에이전트의 플레이 데이터를 기록하기 위해서는 씬(Scene)에서 `Agent` 컴포넌트를 포함하고 있는 GameObject에 `Demonstration Recorder` 컴포넌트를 추가해주어야 합니다. 일단 추가되고나면 에이전트로부터 플레이 데이터를 기록할 수 있게 됩니다.

-`Record`가 체크되는 경우 씬이 실행되면 데이터가 생성됩니다. 환경의 난이도에 따라 모방학습에 사용하기 위해 몇분에서 몇시간 정도 플레이 데이터를 수집해야합니다. 충분한 데이터가 기록되었으면 유니티 상에서 게임의 실행을 정지합니다. 그렇게 하면 `.demo` 파일이 `Assets/Demonstations` 폴더 내부에 생성됩니다. 이 파일에는 에이전트의 플레이 데이터가 저장되어 있습니다. 이 파일을 클릭하면 인스펙터 상에 데모 파일에 대한 정보를 아래와 같이 알려줍니다. +`Record`가 체크되는 경우 씬이 실행되면 데이터가 생성됩니다. 환경의 난이도에 따라 모방학습에 사용하기 위해 몇분에서 몇시간 정도 플레이 데이터를 수집해야합니다. 충분한 데이터가 기록되었으면 유니티 상에서 게임의 실행을 정지합니다. 그렇게 하면 `.demo` 파일이 `Assets/Demonstations` 폴더 내부에 생성됩니다. 이 파일에는 에이전트의 플레이 데이터가 저장되어 있습니다. 이 파일을 클릭하면 인스펙터 상에 데모 파일에 대한 정보를 아래와 같이 알려줍니다.

\ 이것은 선택사항이며 설정하지 않았을 경우 도커는 랜덤한 이름을 생성합니다. _도커 이미지를 실행할 때마다 고유한 이름을 가져야함에 유의하십시오._ - `` 컨테이너를 빌드할 때 사용할 image name을 참조합니다. -- `` __(옵션)__: 리눅스 실행파일과 함께 학습을 할 경우, 인수 값이 실행파일의 이름이 된다. +- `` __(옵션)__: 리눅스 실행파일과 함께 학습을 할 경우, 인수 값이 실행파일의 이름이 된다. 에디터에서 학습을 할 경우, `` 인수를 전달하지 말고 유니티에서 _"Start training by pressing the Play button in the Unity Editor"_ 메세지가 화면에 표시될 때 :arrow_forward: 버튼을 누르십시오. - `source`: 유니티 실행파일을 저장할 호스트 운영체제의 경로를 참조합니다. @@ -85,7 +85,7 @@ docker run --name \ - `docker-target-name`: ML-Agents 파이썬 패키지에게 유니티 실행파일을 읽고 그래프를 저장할 수 있는 디스크의 이름을 알려준다. **그러므로 `target`과 동일한 값을 가져야 합니다.** - `trainer-config-file`, `train`, `run-id`: ML-Agents 인자들은 `mlagents-learn`로 전달됩니다. 트레이너 설정 파일의 이름 `trainer-config-file`, -알고리즘을 학습하는 `train`, 그리고 각 실험에 고유한 식별자를 태깅하는데 사용되는 `run-id`. +알고리즘을 학습하는 `train`, 그리고 각 실험에 고유한 식별자를 태깅하는데 사용되는 `run-id`. 컨테이너가 파일에 접근할 수 있도록 trainer-config 파일을 `unity-volume` 안에 둘 것을 권장합니다. `3DBall` 환경 실행파일을 학습하기 위해 다음 명령어가 사용됩니다: diff --git a/docs/localized/zh-CN/README.md b/docs/localized/zh-CN/README.md index 20ef6a4cdd..a5c93ce8f6 100755 --- a/docs/localized/zh-CN/README.md +++ b/docs/localized/zh-CN/README.md @@ -5,7 +5,7 @@ **注意:** 本文档为v0.3版本文档的部分翻译版,目前并不会随着英文版文档更新而更新。若要查看更新更全的英文版文档,请查看[这里](https://github.com/Unity-Technologies/ml-agents)。 **Unity Machine Learning Agents** (ML-Agents) 是一款开源的 Unity 插件, -使得我们得以在游戏环境和模拟环境中训练智能 agent。您可以使用 reinforcement learning(强化学习)、imitation learning(模仿学习)、neuroevolution(神经进化)或其他机器学习方法, 通过简单易用的 Python API进行控制,对 Agent 进行训练。我们还提供最先进算法的实现方式(基于 +使得我们得以在游戏环境和模拟环境中训练智能 agent。您可以使用 reinforcement learning(强化学习)、imitation learning(模仿学习)、neuroevolution(神经进化)或其他机器学习方法, 通过简单易用的 Python API进行控制,对 Agent 进行训练。我们还提供最先进算法的实现方式(基于 TensorFlow),让游戏开发者和业余爱好者能够轻松地 训练用于 2D、3D 和 VR/AR 游戏的智能 agent。 这些经过训练的 agent 可用于多种目的, @@ -57,7 +57,7 @@ ML-Agents 是一个开源项目,我们鼓励并欢迎大家贡献自己的力 [行为准则](/CODE_OF_CONDUCT.md)。 您可以通过 Unity Connect 和 GitHub 与我们以及更广泛的社区进行交流: -* 加入我们的 +* 加入我们的 [Unity 机器学习频道](https://connect.unity.com/messages/c/035fba4f88400000) 与使用 ML-Agents 
的其他人以及对机器学习充满热情的 Unity 开发者 交流。我们使用该频道来展示关于 ML-Agents diff --git a/docs/localized/zh-CN/docs/Getting-Started-with-Balance-Ball.md b/docs/localized/zh-CN/docs/Getting-Started-with-Balance-Ball.md index 48776e37a5..3feade860e 100755 --- a/docs/localized/zh-CN/docs/Getting-Started-with-Balance-Ball.md +++ b/docs/localized/zh-CN/docs/Getting-Started-with-Balance-Ball.md @@ -37,9 +37,9 @@ agent 是一种观测并与_环境_交互的 **注意:**在 Unity 中,场景内所有元素的基础对象均为 _游戏对象_(GameObject)。游戏对象本质上是其他任何元素 (包括行为、图形、物理等)的容器。要查看组成游戏对象的组件, -请在 Scene 窗口中选择 GameObject,然后打开 +请在 Scene 窗口中选择 GameObject,然后打开 Inspector 窗口。Inspector 会显示游戏对象上的每个组件。 - + 在打开 3D Balance Ball 场景后,您可能会首先注意到它包含的 不是一个平台,而是多个平台。场景中的每个平台都是 独立的 agent,但它们全部共享同一个 Brain。3D Balance Ball 通过 @@ -51,15 +51,15 @@ Inspector 窗口。Inspector 会显示游戏对象上的每个组件。 当您在 Inspector 中查看该 Academy 组件时,可以看到若干 用于控制环境工作方式的属性。例如,Inspector中可以看到 **Training** 和 **Inference Configuration** 属性, 在其中我们可以设置之后生成的 Unity 可执行文件的 -图形和 Time Scale 属性。Academy 在训练期间使用 -**Training Configuration**,而在不训练时使用 +图形和 Time Scale 属性。Academy 在训练期间使用 +**Training Configuration**,而在不训练时使用 **Inference Configuration**。(*Inference* 等同于**不**进行训练的任何时候,此时 agent 可以使用经过训练的模型控制,或用写定的代码控制,或让玩家直接控制。) 通常情况下,您需要为 **Training configuration** 设置低图形质量 -和高Time Scale,而为 **Inference Configuration** 设置高图形质量和 +和高Time Scale,而为 **Inference Configuration** 设置高图形质量和 `1.0` 的Time Scale。 -**注意:**如果您想在训练期间观测环境,则可以调整 -**Inference Configuration** 设置来使用更大的窗口和更接近 +**注意:**如果您想在训练期间观测环境,则可以调整 +**Inference Configuration** 设置来使用更大的窗口和更接近 1:1 的时间刻度。当你要正式训练时一定要重新设置这些参数; 否则,训练可能需要很长时间。 @@ -68,7 +68,7 @@ Inspector 窗口。Inspector 会显示游戏对象上的每个组件。 您可以实现以下三个函数,但这些函数都是可选的: * Academy.InitializeAcademy() — 启动环境时调用一次。 -* Academy.AcademyStep() — 在 +* Academy.AcademyStep() — 在 Agent.AgentAction() 之前(以及 agent 收集其观测结果之后)的每个模拟步骤调用。 * Academy.AcademyReset() — 在 Academy 开始或重新开始模拟 (包括第一次)时调用。 @@ -80,7 +80,7 @@ Agent.AgentAction() 之前(以及 agent 收集其观测结果之后)的每 ### Brain 场景中的 Ball3DBrain 游戏对象包含 Brain 组件, -是 Academy 对象的子级。(场景中的所有 Brain 对象都必须是 +是 Academy 对象的子级。(场景中的所有 
Brain 对象都必须是 Academy 的子级。)3D Balance Ball 环境中的所有 agent 使用 同一个 Brain 实例。 Brain 不存储关于 agent 的任何信息, @@ -99,7 +99,7 @@ Brain 不存储关于 agent 的任何信息, 实现自己的 CoreBrain 来创建自有的类型。 在本教程中,进行训练时,需要将 **Brain Type** 设置为 **External**; -当您将经过训练的模型嵌入到 Unity 应用程序中时,需要将 +当您将经过训练的模型嵌入到 Unity 应用程序中时,需要将 **Brain Type** 更改为 **Internal**。 **向量观测空间** @@ -107,11 +107,11 @@ Brain 不存储关于 agent 的任何信息, 在决策之前,agent 会收集有关自己在环境中所处的状态的 观测结果。ML-Agents 将观测分为两类: **Continuous** 和 **Discrete**。**Continuous** 向量观测空间 -会收集浮点数向量中的观测结果。**Discrete** +会收集浮点数向量中的观测结果。**Discrete** 向量观测空间是一个状态表的索引。大多数示例环境 都使用连续的向量观测空间。 -3D Balance Ball 示例中所用的 Brain 实例使用 **State Size** 为 8 的 +3D Balance Ball 示例中所用的 Brain 实例使用 **State Size** 为 8 的 **Continuous** 向量观测空间。这意味着 包含 agent 观测结果的特征向量包含八个元素: 平台旋转的 `x` 和 `z` 分量以及球相对位置和 @@ -121,10 +121,10 @@ Brain 不存储关于 agent 的任何信息, **向量运动空间** Brain 以*动作*的形式向 agent 提供指令。与状态 -一样,ML-Agents 将动作分为两种类型:**Continuous** +一样,ML-Agents 将动作分为两种类型:**Continuous** 向量运动空间是一个可以连续变化的数字向量。向量 每个元素的含义都是由 agent 逻辑定义的(PPO 训练过程是一个了解agent的哪种状态更好的过程,这个过程是通过学习不同agent的不同状态会对应多少奖励来实现的)。 -例如,一个元素可能表示施加到 agent 某个 +例如,一个元素可能表示施加到 agent 某个 `Rigidbody` 上的力或扭矩。**Discrete** 向量运动空间将其动作 定义为一个表。提供给 agent 的具体动作是这个表的 索引。 @@ -134,7 +134,7 @@ Brain 以*动作*的形式向 agent 提供指令。与状态 您可以尝试使用两种设置进行训练,观测是否有 差异。(使用离散运动空间时将 `Vector Action Space Size` 设置为 4, 而使用连续运动空间时将其设置为 2。) - + ### Agent Agent 是在环境中进行观测并采取动作的参与者。 @@ -143,14 +143,14 @@ Agent 是在环境中进行观测并采取动作的参与者。 属性: * **Brain** — 每个 Agent 必须有一个 Brain。Brain 决定了 agent 如何 -决策。3D Balance Ball 场景中的所有 agent 共享同一个 +决策。3D Balance Ball 场景中的所有 agent 共享同一个 Brain。 * **Visual Observations** — 定义 agent 用来观测其环境的 任何 Camera 对象。3D Balance Ball 不使用摄像机观测。 * **Max Step** — 定义在 agent 决定自己完成之前可以发生多少个 模拟步骤。在 3D Balance Ball 中,agent 在 5000 步之后重新开始。 * **Reset On Done** — 定义 agent 是否在完成时重新开始。 -3D Balance Ball 将此项设置为 true,因此 agent 在达到 +3D Balance Ball 将此项设置为 true,因此 agent 在达到 **Max Step** 计数后或在掉球后重新开始。 也许 agent 更有趣的方面在于 Agent 子类的 @@ -163,9 +163,9 @@ Ball3DAgent 子类定义了以下方法: 训练不局限于特定的开始位置和平台 姿态。 * Agent.CollectObservations() — 
在每个模拟步骤调用。负责 -收集 agent 对环境的观测结果。由于分配给 +收集 agent 对环境的观测结果。由于分配给 agent 的 Brain 实例设置为状态大小为 8 的连续向量观测空间, -因此 `CollectObservations()` 必须调用 8 次 +因此 `CollectObservations()` 必须调用 8 次 `AddVectorObs`。 * Agent.AgentAction() — 在每个模拟步骤调用。接收 Brain 选择的 动作。Ball3DAgent 示例可以处理连续和离散 @@ -180,14 +180,14 @@ agent 的 Brain 实例设置为状态大小为 8 的连续向量观测空间, ## 构建环境 -第一步是打开包含 3D Balance Ball 环境的 +第一步是打开包含 3D Balance Ball 环境的 Unity 场景: 1. 启动 Unity。 2. 在 Projects 对话框上,选择窗口顶部的 **Open** 选项。 -3. 使用随后打开的文件对话框,找到 ML-Agents 项目内的 +3. 使用随后打开的文件对话框,找到 ML-Agents 项目内的 `unity-environment` 文件夹,然后单击 **Open**。 -4. 在 `Project` 窗口中,找到文件夹 +4. 在 `Project` 窗口中,找到文件夹 `Assets/ML-Agents/Examples/3DBall/`。 5. 双击 `Scene` 文件以加载包含 Balance Ball 环境的 场景。 @@ -210,7 +210,7 @@ Unity 场景: * 环境应用程序在后台运行 * 没有对话需要互动 * 正确的场景会自动加载 - + 1. 打开 Player Settings(菜单:**Edit** > **Project Settings** > **Player**)。 2. 在 **Resolution and Presentation** 下方: - 确保选中 **Run in Background**。 @@ -232,8 +232,8 @@ Unity 场景: ## 使用 Reinforcement Learning(强化学习)来训练 Brain 有了一个包含模拟环境的 Unity 可执行文件后,现在我们 -可以执行训练。为了首先确保您的环境和 Python -API 能正常工作,您可以使用 `python/Basics` +可以执行训练。为了首先确保您的环境和 Python +API 能正常工作,您可以使用 `python/Basics` [Jupyter 笔记本](/docs/Background-Jupyter.md)。 此笔记本包含了 API 功能的简单演练。 在 `Basics` 中,务必将 `env_name` 设置为您先前构建的 @@ -241,21 +241,21 @@ API 能正常工作,您可以使用 `python/Basics` ### 使用 PPO 进行训练 -为了训练 agent 对球进行正确平衡,我们将使用一种称为 Proximal Policy Optimization (PPO) 的 +为了训练 agent 对球进行正确平衡,我们将使用一种称为 Proximal Policy Optimization (PPO) 的 Reinforcement Learning(强化学习)算法。 与其他许多 RL 算法相比,这种算法经证明是一种安全、 -有效且更通用的方法,因此我们选择它作为与 ML-Agents +有效且更通用的方法,因此我们选择它作为与 ML-Agents 一起使用的示例算法。有关 PPO 的更多信息, 请参阅 OpenAI 近期发布的[博客文章](https://blog.openai.com/openai-baselines-ppo/), 其中对 PPO 进行了说明。 -为了训练 Balance Ball 环境中的 agent,我们将使用 Python +为了训练 Balance Ball 环境中的 agent,我们将使用 Python 包。我们提供了一个名为 `learn.py` 的方便的 Python 包装脚本,此脚本会接受用于配置训练和预测阶段的参数。 我们将向这个脚本传递我们刚才构建的环境可执行文件的路径。(可选)我们可以 -使用 `run_id` 来识别实验并创建用于存储模型和摘要统计信息的文件夹。当使用 +使用 `run_id` 来识别实验并创建用于存储模型和摘要统计信息的文件夹。当使用 TensorBoard 来观测训练统计信息时,将每次训练的此项设置为顺序值 
将会很有用。也就是说,第一次训练时为“BalanceBall1”, 第二次训练时为“BalanceBall2”,依此类推。如果不这样做,每次训练的 @@ -265,7 +265,7 @@ TensorBoard 来观测训练统计信息时,将每次训练的此项设置为 总之,转到命令行,进入 `ml-agents` 目录并输入: ``` -python3 python/learn.py --run-id= --train +python3 python/learn.py --run-id= --train ``` `--train` 标志告诉 ML-Agents 以训练模式运行。`env_name` 应该是刚才创建的 Unity 可执行文件的名字。 @@ -289,7 +289,7 @@ python3 python/learn.py --run-id= --train * Cumulative Reward - 所有 agent 的平均累积场景奖励。 在成功训练期间应该增大。 * Entropy - 模型决策的随机程度。在成功训练过程中 -应该缓慢减小。如果减小得太快,应增大 `beta` +应该缓慢减小。如果减小得太快,应增大 `beta` 超参数。 * Episode Length - 所有 agent 在环境中每个场景的 平均长度。 @@ -340,12 +340,12 @@ python3 python/learn.py --run-id= --train 1. 经过训练的模型存储在 `ml-agents` 文件夹中的 `models/` 内。训练 完成后,该位置会有一个 `.bytes` 文件,其中的 `` 是训练期间使用的可执行文件的 名称。 -2. 将 `.bytes` 从 `python/models/ppo/` 移入 +2. 将 `.bytes` 从 `python/models/ppo/` 移入 `unity-environment/Assets/ML-Agents/Examples/3DBall/TFModels/`。 3. 打开 Unity Editor,然后选择 `3DBall` 场景(如上所述)。 4. 从 Scene 层级视图中选择 `Ball3DBrain` 对象。 5. 将 `Type of Brain` 更改为 `Internal`。 -6. 将 `.bytes` 文件从 Editor 的 Project 窗口拖入 +6. 将 `.bytes` 文件从 Editor 的 Project 窗口拖入 `3DBallBrain` Inspector 窗口中的 `Graph Model` 占位区域。 7. 按 Editor 顶部的 Play 按钮。 diff --git a/docs/localized/zh-CN/docs/Installation.md b/docs/localized/zh-CN/docs/Installation.md index 4f17f483af..de9c68e213 100755 --- a/docs/localized/zh-CN/docs/Installation.md +++ b/docs/localized/zh-CN/docs/Installation.md @@ -7,12 +7,12 @@ ## 安装 **Unity 2017.1** 或更高版本 [下载](https://store.unity.com/download) 并安装 Unity。如果您想 -使用我们的 Docker 设置(稍后介绍),请确保在安装 Unity 时选择 +使用我们的 Docker 设置(稍后介绍),请确保在安装 Unity 时选择 _Linux Build Support_ 组件。

- Linux Build Support

@@ -22,7 +22,7 @@ _Linux Build Support_ 组件。 git clone git@github.com:Unity-Technologies/ml-agents.git -此代码仓库中的 `unity-environment` 目录包含了要添加到项目中的 +此代码仓库中的 `unity-environment` 目录包含了要添加到项目中的 Unity Assets。`python` 目录包含训练代码。 这两个目录都位于代码仓库的根目录。 diff --git a/docs/localized/zh-CN/docs/Learning-Environment-Create-New.md b/docs/localized/zh-CN/docs/Learning-Environment-Create-New.md index efee4fa1ea..ea73ada781 100755 --- a/docs/localized/zh-CN/docs/Learning-Environment-Create-New.md +++ b/docs/localized/zh-CN/docs/Learning-Environment-Create-New.md @@ -160,7 +160,7 @@ agent 到达目标时会将自己标记为完成状态,而 agent 重置函数 using System.Collections.Generic; using UnityEngine; -public class RollerAgent : Agent +public class RollerAgent : Agent { Rigidbody rBody; void Start () { @@ -171,14 +171,14 @@ public class RollerAgent : Agent public override void AgentReset() { if (this.transform.position.y < -1.0) - { + { // agent 掉落 this.transform.position = Vector3.zero; this.rBody.angularVelocity = Vector3.zero; this.rBody.velocity = Vector3.zero; } else - { + { // 将目标移动到新的位置 Target.position = new Vector3(Random.value * 8 - 4, 0.5f, @@ -235,17 +235,17 @@ public override void CollectObservations() { // 计算相对位置 Vector3 relativePosition = Target.position - this.transform.position; - + // 相对位置 AddVectorObs(relativePosition.x/5); AddVectorObs(relativePosition.z/5); - + // 与平台边缘的距离 AddVectorObs((this.transform.position.x + 5)/5); AddVectorObs((this.transform.position.x - 5)/5); AddVectorObs((this.transform.position.z + 5)/5); AddVectorObs((this.transform.position.z - 5)/5); - + // Agent 速度 AddVectorObs(rBody.velocity.x/5); AddVectorObs(rBody.velocity.z/5); @@ -317,7 +317,7 @@ if (this.transform.position.y < -1.0) ``` **AgentAction()** - + 利用上面列出的动作和奖励逻辑,`AgentAction()` 函数的最终版本如下所示: ```csharp @@ -327,16 +327,16 @@ private float previousDistance = float.MaxValue; public override void AgentAction(float[] vectorAction, string textAction) { // 奖励 - float distanceToTarget = Vector3.Distance(this.transform.position, 
+ float distanceToTarget = Vector3.Distance(this.transform.position, Target.position); - + // 已到达目标 if (distanceToTarget < 1.42f) { Done(); AddReward(1.0f); } - + // 进一步接近 if (distanceToTarget < previousDistance) { @@ -407,9 +407,9 @@ public override void AgentAction(float[] vectorAction, string textAction) 按 **Play** 运行场景,并用 WASD 键在平台上移动 agent。确保在 Unity Editor Console 窗口中没有显示任何错误,并且 agent 在到达目标或掉下平台时会重置。请注意,对于较复杂的调试,ML-Agents SDK 提供了一个方便的 Monitor 类,您可以使用该类轻松地在 Game 窗口中显示 agent 状态信息。 -您可以执行一个额外的测试是,首先使用 `python/Basics` +您可以执行一个额外的测试是,首先使用 `python/Basics` [Jupyter Notebook](/docs/Background-Jupyter.md) -确保您的环境和 Python API 能正常工作。在 `Basics` 中,务必将 +确保您的环境和 Python API 能正常工作。在 `Basics` 中,务必将 `env_name` 设置为您生成的此环境对应的可执行文件的 名称。 diff --git a/docs/localized/zh-CN/docs/Learning-Environment-Design.md b/docs/localized/zh-CN/docs/Learning-Environment-Design.md index 3e6be8f3b7..d8abde3cb8 100755 --- a/docs/localized/zh-CN/docs/Learning-Environment-Design.md +++ b/docs/localized/zh-CN/docs/Learning-Environment-Design.md @@ -24,7 +24,7 @@ ML-Agents Academy 类按如下方式编排 agent 模拟循环: 8. 
当 Academy 达到其自身的 `Max Step` 计数时,它会通过调用您的 Academy 子类的 `AcademyReset()` 函数来再次开始下一场景。 要创建训练环境,请扩展 Academy 和 Agent 类以实现上述方法。`Agent.CollectObservations()` 和 `Agent.AgentAction()` 函数必须实现;而其他方法是可选的,即是否需要实现它们取决于您的具体情况。 - + **注意:**在这里用到的 Python API 也可用于其他目的。例如,借助于该 API,您可以将 Unity 用作您自己的机器学习算法的模拟引擎。请参阅 [Python API](/docs/Python-API.md) 以了解更多信息。 ## 组织 Unity 场景 @@ -46,11 +46,11 @@ Academy 对象会指挥多个 agent 的决策过程。一个场景中有且仅 * `AcademyStep()` — 为下一模拟步骤准备环境。Academy 基类首先调用此函数,然后才调用当前步骤的任何 `AgentAction()` 方法。您可以使用此函数在 agent 采取动作之前更新场景中的其他对象。请注意,在 Academy 调用此方法之前,agent 已收集了自己的观测结果并选择了动作。 Academy 基类还定义了若干可以在 Unity Editor Inspector 中设置的重要属性。对于训练而言,这些属性中最重要的是 `Max Steps`,它决定了每个训练场景的持续时间。Academy 的步骤计数器达到此值后,它将调用 `AcademyReset()` 函数来开始下一轮模拟。 - + 请参阅 [Academy](/docs/Learning-Environment-Design-Academy.md) 以查看 Academy 属性及其用途的完整列表。 ### Brain - + Brain 内部封装了决策过程。Brain 对象必须放在 Hierarchy 视图中的 Academy 的子级。我们必须为每个 Agent 分配一个 Brain,但可以在多个 Agent 之间共享同一个 Brain。 当我们使用 Brain 类的时候不需要使用其子类,而应该直接使用 Brain 这个类。Brain 的行为取决于 Brain 的类型。在训练期间,应将 agent 上连接的 Brain 的 Brain Type 设置为 **External**。要使用经过训练的模型,请将模型文件导入 Unity 项目,并将对应 Brain 的 Brain Type 更改为 **Internal**。请参阅 [Brain](/docs/Learning-Environment-Design-Brains.md) 以了解有关使用不同类型的 Brain 的详细信息。如果四种内置的类型不能满足您的需求,您可以扩展 CoreBrain 类以创建其它的 Brain 类型。 @@ -69,7 +69,7 @@ Agent 类代表场景中负责收集观测结果并采取动作的一个参与 * `AgentAction()` — 执行由 agent 的 Brain 选择的动作,并为当前状态分配奖励。 这些函数的实现决定了分配给此 agent 的 Brain 的属性要如何设置。 - + 您还必须确定 Agent 如何完成任务,以及当它超时后如何处理。agent 完成其任务(或彻底失败)后,您可以在 `AgentAction()` 函数中手动将 agent 设置为完成。您还可以将 agent 的 `Max Steps` 属性设置为正值,这样 agent 在执行了此数量的步骤后会认为自己已完成。Academy 达到自己的 `Max Steps` 计数后,会开始下一场景。如果将 agent 的 `ResetOnDone` 属性设置为 true,则 agent 可以在一个场景中多次尝试自己的任务。(在 `Agent.AgentReset()` 函数中可以设置 agent 的初始化逻辑,为下一次的任务做好准备。) 请参阅 [Agent](/docs/Learning-Environment-Design-Agents.md) 以详细了解如何编写一个你自己的 agent。 diff --git a/docs/localized/zh-CN/docs/Learning-Environment-Examples.md b/docs/localized/zh-CN/docs/Learning-Environment-Examples.md index be1e772313..3ae0de424b 100644 --- 
a/docs/localized/zh-CN/docs/Learning-Environment-Examples.md +++ b/docs/localized/zh-CN/docs/Learning-Environment-Examples.md @@ -1,6 +1,6 @@ # 学习环境示例 -Unity ML-Agents 工具包中内置了一些搭建好的学习环境的示例,并且我们还在不断增加新的示例,这些示例演示了该平台的各种功能。示例环境位于 +Unity ML-Agents 工具包中内置了一些搭建好的学习环境的示例,并且我们还在不断增加新的示例,这些示例演示了该平台的各种功能。示例环境位于 `unity-environment/Assets/ML-Agents/Examples` 中,并且我们在下文中进行了简单的介绍。 此外,我们的 [首届 ML-Agents 挑战赛](https://connect.unity.com/challenges/ml-agents-1) diff --git a/docs/localized/zh-CN/docs/ML-Agents-Overview.md b/docs/localized/zh-CN/docs/ML-Agents-Overview.md index f488a74eb7..46c6f59153 100644 --- a/docs/localized/zh-CN/docs/ML-Agents-Overview.md +++ b/docs/localized/zh-CN/docs/ML-Agents-Overview.md @@ -2,7 +2,7 @@ **Unity Machine Learning Agents** (ML-Agents) 是一款开源的 Unity 插件,使我们得以在游戏和其它模拟环境中训练智能的 agent。您可以使用 reinforcement learning(强化学习)、 imitation learning(模仿学习)、neuroevolution(神经进化)或其他机器学习方法 -通过简单易用的 Python API 对 Agent 进行训练。我们还提供最先进算法的实现方式(基于 +通过简单易用的 Python API 对 Agent 进行训练。我们还提供最先进算法的实现方式(基于 TensorFlow),让游戏开发者和业余爱好者能够轻松地 训练用于 2D、3D 和 VR/AR 游戏的智能 agent。 这些经过训练的 agent 可用于多种目的, @@ -16,9 +16,9 @@ TensorFlow),让游戏开发者和业余爱好者能够轻松地 根据您的背景(如研究人员、游戏开发人员、业余爱好者), 您现在可能在脑海中会有非常不同的问题。 为了让您更轻松地过渡到 ML-Agents, -我们提供了多个后台页面,其中包括有关 +我们提供了多个后台页面,其中包括有关 [Unity 引擎](/docs/Background-Unity.md)、 -[机器学习](/docs/Background-Machine-Learning.md)和 +[机器学习](/docs/Background-Machine-Learning.md)和 [TensorFlow](/docs/Background-TensorFlow.md) 的概述和有用资源。如果您不熟悉 Unity 场景,不了解基本的机器学习概念,或者以前没有听说过 TensorFlow,**强烈**建议您浏览相关的背景知识页面。 此页面的其余部分深入介绍了 ML-Agents、包括其重要组件、 @@ -118,12 +118,12 @@ ML-Agents 是一个 Unity 插件,它包含三个高级组件: 所有机器学习算法。请注意, 与学习环境不同,Python API 不是 Unity 的一部分,而是位于外部 并通过 External Communicator 与 Unity 进行通信。 -* **External Communicator** - 它将 Unity 环境与 Python API +* **External Communicator** - 它将 Unity 环境与 Python API 连接起来。它位于 Unity 环境中。

- Simplified ML-Agents Scene Block Diagram

@@ -156,28 +156,28 @@ External Communicator 位于 Academy 内。 Brain 定义了所有可能的观测和动作的空间, 而与之相连的 Agent(在本示例中是指军医)可以各自拥有 自己独特的观测和动作值。如果我们将游戏 -扩展到包含坦克驾驶员 NPC,那么附加到这些角色的 +扩展到包含坦克驾驶员 NPC,那么附加到这些角色的 Agent 不能与连接到军医的 Agent 共享一个 Brain(军医和驾驶员 有不同的动作)。

- Example ML-Agents Scene Block Diagram

_示例游戏的 ML-Agents 的示例框图。_ -我们尚未讨论 ML-Agents 如何训练行为以及 Python API 和 +我们尚未讨论 ML-Agents 如何训练行为以及 Python API 和 External Communicator 的作用。在我们深入了解这些细节之前, 让我们总结一下先前的组件。每个游戏角色上附有一个 Agent, 而每个 Agent 都连接到一个 Brain。Brain 从 Agent 处接收观测结果和奖励并返回动作。Academy 除了能够控制环境参数之外,还可确保所有 Agent 和 Brain 都处于同步状态。那么,Brain 如何控制 Agent 的动作呢? 实际上,我们有四种不同类型的 Brain,它们可以实现广泛的训练和预测情形: * **External** - 使用 Python API 进行决策。这种情况下, -Brain 收集的观测结果和奖励通过 External Communicator +Brain 收集的观测结果和奖励通过 External Communicator 转发给 Python API。Python API 随后返回 Agent 需要采取的相应动作。 -* **Internal** - 使用嵌入式 +* **Internal** - 使用嵌入式 [TensorFlow](/docs/Background-TensorFlow.md) 模型进行决策。 嵌入式 TensorFlow 模型包含了学到的 policy,Brain 直接使用 此模型来确定每个 Agent 的动作。 @@ -188,7 +188,7 @@ Brain 收集的观测结果和奖励通过 External Communicator 具有写死逻辑行为的 Agent。也有助于把这种由写死逻辑指挥的 Agent 与 训练好的 Agent 进行比较。在我们的示例中,一旦我们 为军医训练了 Brain,我们便可以为一个军队的军医分配 -经过训练的 Brain,而为另一个军队的军医分配具有写死逻辑行为的 +经过训练的 Brain,而为另一个军队的军医分配具有写死逻辑行为的 Heuristic Brain。然后,我们可以评估哪个军医的效率更高。 根据目前所述,External Communicator 和 Python API 似乎 @@ -199,8 +199,8 @@ Heuristic Brain。然后,我们可以评估哪个军医的效率更高。 看到,这样可以实现其他的训练模式。

- ML-Agents Scene Block Diagram

@@ -227,16 +227,16 @@ Brain 类型在训练期间设置为 External,在预测期间设置为 Interna 因此所学的 policy 只是一个 TensorFlow 模型文件。然后在预测阶段, 我们将 Brain 类型切换为 Internal,并加入从训练阶段 生成的 TensorFlow 模型。现在,在预测阶段,军医 -仍然继续生成他们的观测结果,但不再将结果发送到 +仍然继续生成他们的观测结果,但不再将结果发送到 Python API,而是送入他们的嵌入了的 TensorFlow 模型, 以便生成每个军医在每个时间点上要采取的_最佳_动作。 总结一下:我们的实现是基于 TensorFlow 的,因此, -在训练期间,Python API 使用收到的观测结果来学习 -TensorFlow 模型。然后在预测过程中该模型将嵌入到 +在训练期间,Python API 使用收到的观测结果来学习 +TensorFlow 模型。然后在预测过程中该模型将嵌入到 Internal Brain 中,以便为连接到该 Brain 的所有 Agent 生成 最佳动作。**请注意,我们的 Internal Brain 目前是实验性的, -因为它仅限于 TensorFlow 模型并会利用第三方 +因为它仅限于 TensorFlow 模型并会利用第三方 [TensorFlowSharp](https://github.com/migueldeicaza/TensorFlowSharp) 库。** @@ -246,7 +246,7 @@ Internal Brain 中,以便为连接到该 Brain 的所有 Agent 生成 ### 自定义训练和预测 先前的模式中使用 External Brain 类型进行训练, -从而生成 Internal Brain 类型可以理解和使用的 TensorFlow +从而生成 Internal Brain 类型可以理解和使用的 TensorFlow 模型。然而,ML-Agents 的任何用户都可以利用自己的算法 进行训练和预测。在这种情况下,训练阶段和预测阶段 的 Brain 类型都会设置为 External,并且场景中所有 Agent 的行为 @@ -271,8 +271,8 @@ Internal Brain 中,以便为连接到该 Brain 的所有 Agent 生成 较难的任务提供基础。

- Example Math Curriculum

@@ -290,7 +290,7 @@ _数学课程的示例。从简单主题到复杂主题的课程进度安排, 即,随着环境逐渐复杂化,policy 也会不断 改进。在我们的示例中,我们可以考虑当每个队只包含一个 玩家时,首先训练军医,然后再反复增加玩家人数 -(即环境复杂度)。ML-Agents 支持在 +(即环境复杂度)。ML-Agents 支持在 Academy 内设置自定义环境参数。因此, 可以根据训练进度动态调整与难度或复杂性相关的 环境要素(比如游戏对象)。 @@ -369,7 +369,7 @@ agent 必须对事件作出反应的游戏, agent 必须学会记住过去才能做出 最好的决策。当 agent 只能部分观测环境时, 跟踪过去的观测结果可以帮助 agent 学习。我们在 -教练中提供了一种_长短期记忆_ +教练中提供了一种_长短期记忆_ ([LSTM](https://en.wikipedia.org/wiki/Long_short-term_memory)) 的实现,使 agent 能够存储要在未来步骤中 使用的记忆。您可以在 @@ -393,7 +393,7 @@ agent 必须学会记住过去才能做出 第一人称视觉的导航 agent。您可以在 [此处](/docs/Learning-Environment-Design-Agents.md#multiple-visual-observations)了解更多关于向 agent 添加视觉观测的 信息。 - + * **Broadcasting** - 如前所述,默认情况下,External Brain 会将 其所有 Agent 的观测结果发送到 Python API。这对 训练或预测很有帮助。Broadcasting 是一种可以为 @@ -407,25 +407,25 @@ agent 必须学会记住过去才能做出 [此处](/docs/Learning-Environment-Design-Brains.md#using-the-broadcast-feature)了解更多关于使用 broadcasting 功能的 信息。 -* **Docker 设置(测试功能)** - 为了便于在不直接安装 +* **Docker 设置(测试功能)** - 为了便于在不直接安装 Python 或 TensorFlow 的情况下设置 ML-Agents, 我们提供了关于如何创建和运行 Docker 容器的 [指南](/docs/Using-Docker.md)。由于渲染视觉观测的限制, 该功能被标记为测试功能。 -* **AWS 上的云训练** - 为了便于在 Amazon Web Services (AWS) +* **AWS 上的云训练** - 为了便于在 Amazon Web Services (AWS) 机器上使用 ML-Agents,我们提供了一份 [指南](/docs/Training-on-Amazon-Web-Service.md) -让您了解如何设置 EC2 实例以及公共的预配置 Amazon +让您了解如何设置 EC2 实例以及公共的预配置 Amazon Machine Image (AMI)。 -* **Microsoft Azure 上的云训练** - 为了便于在 Microsoft Azure +* **Microsoft Azure 上的云训练** - 为了便于在 Microsoft Azure 机器上使用 ML-Agents,我们提供了一份 [指南](/docs/Training-on-Microsoft-Azure.md) 让您了解如何设置 virtual machine instance 实例以及公共的预配置 Data Science VM。 * **Cloud Training on Microsoft Azure** - To facilitate using ML-Agents on -Azure machines, we provide a +Azure machines, we provide a [guide](Training-on-Microsoft-Azure.md) on how to set-up virtual machine instances in addition to a pre-configured data science image. 
diff --git a/docs/localized/zh-CN/docs/Readme.md b/docs/localized/zh-CN/docs/Readme.md index 81f9b1679b..4c98ddda7d 100644 --- a/docs/localized/zh-CN/docs/Readme.md +++ b/docs/localized/zh-CN/docs/Readme.md @@ -20,7 +20,7 @@ * [训练环境设计要点](/docs/Learning-Environment-Best-Practices.md) * [如何使用 Monitor 功能](/docs/Feature-Monitor.md) * [如何使用 TensorFlowSharp 插件(测试功能)](/docs/Using-TensorFlow-Sharp-in-Unity.md) - + ## 进行训练 * [如何用 ML-Agents 进行训练](/docs/Training-ML-Agents.md) * [Proximal Policy Optimization 训练要点](/docs/Training-PPO.md) @@ -36,7 +36,7 @@ * [常见问题](/docs/FAQ.md) * [ML-Agents 术语表](/docs/Glossary.md) * [ML-Agents 尚未实现功能](/docs/Limitations.md) - + ## API 文档 * [API 参考](/docs/API-Reference.md) * [如何使用 Python API](/docs/Python-API.md) diff --git a/gym-unity/README.md b/gym-unity/README.md index f1c14df650..6d7872508a 100755 --- a/gym-unity/README.md +++ b/gym-unity/README.md @@ -43,14 +43,14 @@ env = UnityEnv(environment_filename, worker_id, use_visual, uint8_visual, multia observations (False) as the default observation provided by the `reset` and `step` functions. Defaults to `False`. -* `uint8_visual` refers to whether to output visual observations as `uint8` values - (0-255). Many common Gym environments (e.g. Atari) do this. By default they +* `uint8_visual` refers to whether to output visual observations as `uint8` values + (0-255). Many common Gym environments (e.g. Atari) do this. By default they will be floats (0.0-1.0). Defaults to `False`. * `multiagent` refers to whether you intent to launch an environment which contains more than one agent. Defaults to `False`. -* `flatten_branched` will flatten a branched discrete action space into a Gym Discrete. +* `flatten_branched` will flatten a branched discrete action space into a Gym Discrete. Otherwise, it will be converted into a MultiDiscrete. Defaults to `False`. 
* `allow_multiple_visual_obs` will return a list of observation instead of only @@ -63,8 +63,8 @@ For more on using the gym interface, see our ## Limitations -* It is only possible to use an environment with a single Brain. -* By default, the first visual observation is provided as the `observation`, if +* It is only possible to use an environment with a single Agent. +* By default, the first visual observation is provided as the `observation`, if present. Otherwise, vector observations are provided. You can receive all visual observations by using the `allow_multiple_visual_obs=True` option in the gym parameters. If set to `True`, you will receive a list of `observation` instead @@ -89,17 +89,17 @@ These examples were tested with baselines version 0.1.6. ### Example - DQN Baseline In order to train an agent to play the `GridWorld` environment using the -Baselines DQN algorithm, you first need to install the baselines package using +Baselines DQN algorithm, you first need to install the baselines package using pip: ``` pip install git+git://github.com/openai/baselines ``` -Next, create a file called `train_unity.py`. Then create an `/envs/` directory -and build the GridWorld environment to that directory. For more information on -building Unity environments, see -[here](../docs/Learning-Environment-Executable.md). Add the following code to +Next, create a file called `train_unity.py`. Then create an `/envs/` directory +and build the GridWorld environment to that directory. For more information on +building Unity environments, see +[here](../docs/Learning-Environment-Executable.md). Add the following code to the `train_unity.py` file: ```python @@ -148,7 +148,7 @@ python -m train_unity ### Other Algorithms Other algorithms in the Baselines repository can be run using scripts similar to -the examples from the baselines package. In most cases, the primary changes needed +the examples from the baselines package. 
In most cases, the primary changes needed to use a Unity environment are to import `UnityEnv`, and to replace the environment creation code, typically `gym.make()`, with a call to `UnityEnv(env_path)` passing the environment binary path. @@ -158,7 +158,7 @@ should be done to Atari training scripts, and for vector observation environments, modification should be done to Mujoco scripts. Some algorithms will make use of `make_env()` or `make_mujoco_env()` -functions. You can define a similar function for Unity environments. An example of +functions. You can define a similar function for Unity environments. An example of such a method using the PPO2 baseline: ```python @@ -208,37 +208,37 @@ if __name__ == '__main__': ## Run Google Dopamine Algorithms Google provides a framework [Dopamine](https://github.com/google/dopamine), and -implementations of algorithms, e.g. DQN, Rainbow, and the C51 variant of Rainbow. -Using the Gym wrapper, we can run Unity environments using Dopamine. +implementations of algorithms, e.g. DQN, Rainbow, and the C51 variant of Rainbow. +Using the Gym wrapper, we can run Unity environments using Dopamine. -First, after installing the Gym wrapper, clone the Dopamine repository. +First, after installing the Gym wrapper, clone the Dopamine repository. ``` git clone https://github.com/google/dopamine ``` -Then, follow the appropriate install instructions as specified on -[Dopamine's homepage](https://github.com/google/dopamine). Note that the Dopamine -guide specifies using a virtualenv. If you choose to do so, make sure your unity_env +Then, follow the appropriate install instructions as specified on +[Dopamine's homepage](https://github.com/google/dopamine). Note that the Dopamine +guide specifies using a virtualenv. If you choose to do so, make sure your unity_env package is also installed within the same virtualenv as Dopamine. ### Adapting Dopamine's Scripts -First, open `dopamine/atari/run_experiment.py`. 
Alternatively, copy the entire `atari` +First, open `dopamine/atari/run_experiment.py`. Alternatively, copy the entire `atari` folder, and name it something else (e.g. `unity`). If you choose the copy approach, be sure to change the package names in the import statements in `train.py` to your new directory. -Within `run_experiment.py`, we will need to make changes to which environment is -instantiated, just as in the Baselines example. At the top of the file, insert +Within `run_experiment.py`, we will need to make changes to which environment is +instantiated, just as in the Baselines example. At the top of the file, insert ```python from gym_unity.envs import UnityEnv ``` -to import the Gym Wrapper. Navigate to the `create_atari_environment` method +to import the Gym Wrapper. Navigate to the `create_atari_environment` method in the same file, and switch to instantiating a Unity environment by replacing -the method with the following code. +the method with the following code. ```python game_version = 'v0' if sticky_actions else 'v4' @@ -247,36 +247,35 @@ the method with the following code. return env ``` -`./envs/GridWorld` is the path to your built Unity executable. For more information on -building Unity environments, see [here](../docs/Learning-Environment-Executable.md), and note -the Limitations section below. +`./envs/GridWorld` is the path to your built Unity executable. For more information on +building Unity environments, see [here](../docs/Learning-Environment-Executable.md), and note +the Limitations section below. -Note that we are not using the preprocessor from Dopamine, -as it uses many Atari-specific calls. Furthermore, frame-skipping can be done from within Unity, -rather than on the Python side. +Note that we are not using the preprocessor from Dopamine, +as it uses many Atari-specific calls. Furthermore, frame-skipping can be done from within Unity, +rather than on the Python side. 
### Limitations Since Dopamine is designed around variants of DQN, it is only compatible with discrete action spaces, and specifically the Discrete Gym space. For environments -that use branched discrete action spaces (e.g. -[VisualBanana](../docs/Learning-Environment-Examples.md)), you can enable the -`flatten_branched` parameter in `UnityEnv`, which treats each combination of branched +that use branched discrete action spaces (e.g. +[VisualBanana](../docs/Learning-Environment-Examples.md)), you can enable the +`flatten_branched` parameter in `UnityEnv`, which treats each combination of branched actions as separate actions. -Furthermore, when building your environments, ensure that your -[Learning Brain](../docs/Learning-Environment-Design-Brains.md) is using visual +Furthermore, when building your environments, ensure that your Agent is using visual observations with greyscale enabled, and that the dimensions of the visual observations is 84 by 84 (matches the parameter found in `dqn_agent.py` and `rainbow_agent.py`). -Dopamine's agents currently do not automatically adapt to the observation -dimensions or number of channels. +Dopamine's agents currently do not automatically adapt to the observation +dimensions or number of channels. ### Hyperparameters The hyperparameters provided by Dopamine are tailored to the Atari games, and you will -likely need to adjust them for ML-Agents environments. Here is a sample -`dopamine/agents/rainbow/configs/rainbow.gin` file that is known to work with -GridWorld. +likely need to adjust them for ML-Agents environments. Here is a sample +`dopamine/agents/rainbow/configs/rainbow.gin` file that is known to work with +GridWorld. ```python import dopamine.agents.rainbow.rainbow_agent @@ -314,9 +313,9 @@ WrappedPrioritizedReplayBuffer.batch_size = 32 ``` This example assumed you copied `atari` to a separate folder named `unity`. 
-Replace `unity` in `import dopamine.unity.run_experiment` with the folder you +Replace `unity` in `import dopamine.unity.run_experiment` with the folder you copied your `run_experiment.py` and `trainer.py` files to. -If you directly modified the existing files, then use `atari` here. +If you directly modified the existing files, then use `atari` here. ### Starting a Run @@ -329,20 +328,20 @@ python -um dopamine.unity.train \ --gin_files='dopamine/agents/rainbow/configs/rainbow.gin' ``` -Again, we assume that you've copied `atari` into a separate folder. +Again, we assume that you've copied `atari` into a separate folder. Remember to replace `unity` with the directory you copied your files into. If you edited the Atari files directly, this should be `atari`. ### Example: GridWorld As a baseline, here are rewards over time for the three algorithms provided with -Dopamine as run on the GridWorld example environment. All Dopamine (DQN, Rainbow, -C51) runs were done with the same epsilon, epsilon decay, replay history, training steps, +Dopamine as run on the GridWorld example environment. All Dopamine (DQN, Rainbow, +C51) runs were done with the same epsilon, epsilon decay, replay history, training steps, and buffer settings as specified above. Note that the first 20000 steps are used to pre-fill -the training buffer, and no learning happens. +the training buffer, and no learning happens. -We provide results from our PPO implementation and the DQN from Baselines as reference. -Note that all runs used the same greyscale GridWorld as Dopamine. For PPO, `num_layers` +We provide results from our PPO implementation and the DQN from Baselines as reference. +Note that all runs used the same greyscale GridWorld as Dopamine. For PPO, `num_layers` was set to 2, and all other hyperparameters are the default for GridWorld in `trainer_config.yaml`. For Baselines DQN, the provided hyperparameters in the previous section are used. Note that Baselines implements certain features (e.g. 
dueling-Q) that are not enabled @@ -353,8 +352,8 @@ in Dopamine DQN. ### Example: VisualBanana As an example of using the `flatten_branched` option, we also used the Rainbow -algorithm to train on the VisualBanana environment, and provide the results below. -The same hyperparameters were used as in the GridWorld case, except that +algorithm to train on the VisualBanana environment, and provide the results below. +The same hyperparameters were used as in the GridWorld case, except that `replay_history` and `epsilon_decay` were increased to 100000. ![Dopamine on VisualBanana](images/dopamine_visualbanana_plot.png) diff --git a/gym-unity/gym_unity/envs/__init__.py b/gym-unity/gym_unity/envs/__init__.py index b4d7e4717a..68e6206adf 100644 --- a/gym-unity/gym_unity/envs/__init__.py +++ b/gym-unity/gym_unity/envs/__init__.py @@ -51,6 +51,11 @@ def __init__( self._env = UnityEnvironment( environment_filename, worker_id, no_graphics=no_graphics ) + + # Take a single step so that the brain information will be sent over + if not self._env.brains: + self._env.step() + self.name = self._env.academy_name self.visual_obs = None self._current_state = None @@ -132,20 +137,20 @@ def __init__( high = np.array([np.inf] * brain.vector_observation_space_size) self.action_meanings = brain.vector_action_descriptions if self.use_visual: - if brain.camera_resolutions[0]["blackAndWhite"]: - depth = 1 - else: - depth = 3 - self._observation_space = spaces.Box( - 0, - 1, - dtype=np.float32, - shape=( - brain.camera_resolutions[0]["height"], - brain.camera_resolutions[0]["width"], - depth, - ), + shape = ( + brain.camera_resolutions[0].height, + brain.camera_resolutions[0].width, + brain.camera_resolutions[0].num_channels, ) + if uint8_visual: + self._observation_space = spaces.Box( + 0, 255, dtype=np.uint8, shape=shape + ) + else: + self._observation_space = spaces.Box( + 0, 1, dtype=np.float32, shape=shape + ) + else: self._observation_space = spaces.Box(-high, high, dtype=np.float32) diff 
--git a/gym-unity/gym_unity/tests/test_gym.py b/gym-unity/gym_unity/tests/test_gym.py index 18bd08f12c..cbc35454c5 100644 --- a/gym-unity/gym_unity/tests/test_gym.py +++ b/gym-unity/gym_unity/tests/test_gym.py @@ -4,6 +4,7 @@ from gym import spaces from gym_unity.envs import UnityEnv, UnityGymException +from mlagents.envs.brain import CameraResolution @mock.patch("gym_unity.envs.UnityEnvironment") @@ -18,6 +19,7 @@ def test_gym_wrapper(mock_env): actions = env.action_space.sample() assert actions.shape[0] == 2 obs, rew, done, info = env.step(actions) + assert env.observation_space.contains(obs) assert isinstance(obs, np.ndarray) assert isinstance(rew, float) assert isinstance(done, bool) @@ -62,6 +64,26 @@ def test_branched_flatten(mock_env): assert isinstance(env.action_space, spaces.MultiDiscrete) +@pytest.mark.parametrize("use_uint8", [True, False], ids=["float", "uint8"]) +@mock.patch("gym_unity.envs.UnityEnvironment") +def test_gym_wrapper_visual(mock_env, use_uint8): + mock_brain = create_mock_brainparams(number_visual_observations=1) + mock_braininfo = create_mock_vector_braininfo(number_visual_observations=1) + setup_mock_unityenvironment(mock_env, mock_brain, mock_braininfo) + + env = UnityEnv(" ", use_visual=True, multiagent=False, uint8_visual=use_uint8) + assert isinstance(env, UnityEnv) + assert isinstance(env.reset(), np.ndarray) + actions = env.action_space.sample() + assert actions.shape[0] == 2 + obs, rew, done, info = env.step(actions) + assert env.observation_space.contains(obs) + assert isinstance(obs, np.ndarray) + assert isinstance(rew, float) + assert isinstance(done, bool) + assert isinstance(info, dict) + + # Helper methods @@ -80,6 +102,11 @@ def create_mock_brainparams( vector_action_space_size = [2] mock_brain = mock.Mock() mock_brain.return_value.number_visual_observations = number_visual_observations + if number_visual_observations: + mock_brain.return_value.camera_resolutions = [ + CameraResolution(width=8, height=8, num_channels=3) + 
for _ in range(number_visual_observations) + ] mock_brain.return_value.num_stacked_vector_observations = ( num_stacked_vector_observations ) @@ -91,7 +118,7 @@ def create_mock_brainparams( return mock_brain() -def create_mock_vector_braininfo(num_agents=1): +def create_mock_vector_braininfo(num_agents=1, number_visual_observations=0): """ Creates a mock BrainInfo with vector observations. Imitates constant vector observations, rewards, dones, and agents. @@ -100,6 +127,8 @@ def create_mock_vector_braininfo(num_agents=1): """ mock_braininfo = mock.Mock() mock_braininfo.return_value.vector_observations = np.array([num_agents * [1, 2, 3]]) + if number_visual_observations: + mock_braininfo.return_value.visual_observations = [[np.zeros(shape=(8, 8, 3))]] mock_braininfo.return_value.rewards = num_agents * [1.0] mock_braininfo.return_value.local_done = num_agents * [False] mock_braininfo.return_value.text_observations = num_agents * [""] diff --git a/gym-unity/setup.py b/gym-unity/setup.py index b8997d6ffa..f724e0dae8 100755 --- a/gym-unity/setup.py +++ b/gym-unity/setup.py @@ -1,15 +1,40 @@ #!/usr/bin/env python +import os +import sys from setuptools import setup, find_packages +from setuptools.command.install import install + +VERSION = "0.11.0" + + +class VerifyVersionCommand(install): + """ + Custom command to verify that the git tag matches our version + See https://circleci.com/blog/continuously-deploying-python-packages-to-pypi-with-circleci/ + """ + + description = "verify that the git tag matches our version" + + def run(self): + tag = os.getenv("CIRCLE_TAG") + + if tag != VERSION: + info = "Git tag: {0} does not match the version of this app: {1}".format( + tag, VERSION + ) + sys.exit(info) + setup( name="gym_unity", - version="0.4.8", + version=VERSION, description="Unity Machine Learning Agents Gym Interface", license="Apache License 2.0", author="Unity Technologies", author_email="ML-Agents@unity3d.com", url="https://github.com/Unity-Technologies/ml-agents", 
packages=find_packages(), - install_requires=["gym", "mlagents_envs==0.10.1"], + install_requires=["gym", "mlagents_envs=={}".format(VERSION)], + cmdclass={"verify": VerifyVersionCommand}, ) diff --git a/ml-agents-envs/README.md b/ml-agents-envs/README.md index a870485ee4..c3fa2dbf74 100644 --- a/ml-agents-envs/README.md +++ b/ml-agents-envs/README.md @@ -3,9 +3,9 @@ The `mlagents_envs` Python package is part of the [ML-Agents Toolkit](https://github.com/Unity-Technologies/ml-agents). `mlagents_envs` provides a Python API that allows direct interaction with the Unity -game engine. It is used by the trainer implementation in `mlagents` as well as -the `gym-unity` package to perform reinforcement learning within Unity. `mlagents_envs` can be -used independently of `mlagents` for Python communication. +game engine. It is used by the trainer implementation in `mlagents` as well as +the `gym-unity` package to perform reinforcement learning within Unity. `mlagents_envs` can be +used independently of `mlagents` for Python communication. 
The `mlagents_envs` Python package contains one sub package: diff --git a/ml-agents-envs/mlagents/envs/brain.py b/ml-agents-envs/mlagents/envs/brain.py index 8c4f73bdfd..1c33f7ec4d 100644 --- a/ml-agents-envs/mlagents/envs/brain.py +++ b/ml-agents-envs/mlagents/envs/brain.py @@ -2,24 +2,32 @@ import numpy as np import io -from mlagents.envs.communicator_objects.agent_info_proto_pb2 import AgentInfoProto -from mlagents.envs.communicator_objects.brain_parameters_proto_pb2 import ( - BrainParametersProto, -) +from mlagents.envs.communicator_objects.agent_info_pb2 import AgentInfoProto +from mlagents.envs.communicator_objects.brain_parameters_pb2 import BrainParametersProto from mlagents.envs.timers import hierarchical_timer, timed -from typing import Dict, List, Optional +from typing import Dict, List, NamedTuple, Optional from PIL import Image logger = logging.getLogger("mlagents.envs") +class CameraResolution(NamedTuple): + height: int + width: int + num_channels: int + + @property + def gray_scale(self) -> bool: + return self.num_channels == 1 + + class BrainParameters: def __init__( self, brain_name: str, vector_observation_space_size: int, num_stacked_vector_observations: int, - camera_resolutions: List[Dict], + camera_resolutions: List[CameraResolution], vector_action_space_size: List[int], vector_action_descriptions: List[str], vector_action_space_type: int, @@ -56,21 +64,24 @@ def __str__(self): ) @staticmethod - def from_proto(brain_param_proto: BrainParametersProto) -> "BrainParameters": + def from_proto( + brain_param_proto: BrainParametersProto, agent_info: AgentInfoProto + ) -> "BrainParameters": """ Converts brain parameter proto to BrainParameter object. :param brain_param_proto: protobuf object. :return: BrainParameter object. 
""" - resolution = [ - {"height": x.height, "width": x.width, "blackAndWhite": x.gray_scale} - for x in brain_param_proto.camera_resolutions + resolutions = [ + CameraResolution(x.shape[0], x.shape[1], x.shape[2]) + for x in agent_info.compressed_observations ] + brain_params = BrainParameters( brain_param_proto.brain_name, brain_param_proto.vector_observation_size, brain_param_proto.num_stacked_vector_observations, - resolution, + resolutions, list(brain_param_proto.vector_action_size), list(brain_param_proto.vector_action_descriptions), brain_param_proto.vector_action_space_type, @@ -187,8 +198,8 @@ def from_agent_proto( for i in range(brain_params.number_visual_observations): obs = [ BrainInfo.process_pixels( - x.visual_observations[i], - brain_params.camera_resolutions[i]["blackAndWhite"], + x.compressed_observations[i].data, + brain_params.camera_resolutions[i].gray_scale, ) for x in agent_info_list ] @@ -196,7 +207,7 @@ def from_agent_proto( if len(agent_info_list) == 0: memory_size = 0 else: - memory_size = max([len(x.memories) for x in agent_info_list]) + memory_size = max(len(x.memories) for x in agent_info_list) if memory_size == 0: memory = np.zeros((0, 0)) else: @@ -214,14 +225,10 @@ def from_agent_proto( 0 if agent_info.action_mask[k] else 1 for k in range(total_num_actions) ] - if any([np.isnan(x.reward) for x in agent_info_list]): + if any(np.isnan(x.reward) for x in agent_info_list): logger.warning( "An agent had a NaN reward for brain " + brain_params.brain_name ) - if any([np.isnan(x.stacked_vector_observation).any() for x in agent_info_list]): - logger.warning( - "An agent had a NaN observation for brain " + brain_params.brain_name - ) if len(agent_info_list) == 0: vector_obs = np.zeros( @@ -232,9 +239,32 @@ def from_agent_proto( ) ) else: - vector_obs = np.nan_to_num( - np.array([x.stacked_vector_observation for x in agent_info_list]) - ) + stacked_obs = [] + has_nan = False + has_inf = False + for x in agent_info_list: + np_obs = 
np.array(x.stacked_vector_observation) + # Check for NaNs or infs in the observations + # If there's a NaN in the observations, the dot() result will be NaN + # If there's an Inf (either sign) then the result will be Inf + # See https://stackoverflow.com/questions/6736590/fast-check-for-nan-in-numpy for background + # Note that a very large values (larger than sqrt(float_max)) will result in an Inf value here + # This is OK though, worst case it results in an unnecessary (but harmless) nan_to_num call. + d = np.dot(np_obs, np_obs) + has_nan = has_nan or np.isnan(d) + has_inf = has_inf or not np.isfinite(d) + stacked_obs.append(np_obs) + vector_obs = np.array(stacked_obs) + + # In we have any NaN or Infs, use np.nan_to_num to replace these with finite values + if has_nan or has_inf: + vector_obs = np.nan_to_num(vector_obs) + + if has_nan: + logger.warning( + f"An agent had a NaN observation for brain {brain_params.brain_name}" + ) + agents = [f"${worker_id}-{x.id}" for x in agent_info_list] brain_info = BrainInfo( visual_observation=vis_obs, diff --git a/ml-agents-envs/mlagents/envs/communicator.py b/ml-agents-envs/mlagents/envs/communicator.py index 61286b25a4..720f0e8963 100644 --- a/ml-agents-envs/mlagents/envs/communicator.py +++ b/ml-agents-envs/mlagents/envs/communicator.py @@ -1,8 +1,8 @@ import logging from typing import Optional -from mlagents.envs.communicator_objects.unity_output_pb2 import UnityOutput -from mlagents.envs.communicator_objects.unity_input_pb2 import UnityInput +from mlagents.envs.communicator_objects.unity_output_pb2 import UnityOutputProto +from mlagents.envs.communicator_objects.unity_input_pb2 import UnityInputProto logger = logging.getLogger("mlagents.envs") @@ -16,14 +16,14 @@ def __init__(self, worker_id=0, base_port=5005): :int worker_id: Number to add to communication port (5005) [0]. Used for asynchronous agent scenarios. 
""" - def initialize(self, inputs: UnityInput) -> UnityOutput: + def initialize(self, inputs: UnityInputProto) -> UnityOutputProto: """ Used to exchange initialization parameters between Python and the Environment :param inputs: The initialization input that will be sent to the environment. :return: UnityOutput: The initialization output sent by Unity """ - def exchange(self, inputs: UnityInput) -> Optional[UnityOutput]: + def exchange(self, inputs: UnityInputProto) -> Optional[UnityOutputProto]: """ Used to send an input and receive an output from the Environment :param inputs: The UnityInput that needs to be sent the Environment diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/agent_action_pb2.py b/ml-agents-envs/mlagents/envs/communicator_objects/agent_action_pb2.py new file mode 100644 index 0000000000..9b2454e53d --- /dev/null +++ b/ml-agents-envs/mlagents/envs/communicator_objects/agent_action_pb2.py @@ -0,0 +1,102 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: mlagents/envs/communicator_objects/agent_action.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from mlagents.envs.communicator_objects import custom_action_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_custom__action__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='mlagents/envs/communicator_objects/agent_action.proto', + package='communicator_objects', + syntax='proto3', + serialized_pb=_b('\n5mlagents/envs/communicator_objects/agent_action.proto\x12\x14\x63ommunicator_objects\x1a\x36mlagents/envs/communicator_objects/custom_action.proto\"\xa1\x01\n\x10\x41gentActionProto\x12\x16\n\x0evector_actions\x18\x01 \x03(\x02\x12\x14\n\x0ctext_actions\x18\x02 \x01(\t\x12\x10\n\x08memories\x18\x03 \x03(\x02\x12\r\n\x05value\x18\x04 \x01(\x02\x12>\n\rcustom_action\x18\x05 \x01(\x0b\x32\'.communicator_objects.CustomActionProtoB\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3') + , + dependencies=[mlagents_dot_envs_dot_communicator__objects_dot_custom__action__pb2.DESCRIPTOR,]) + + + + +_AGENTACTIONPROTO = _descriptor.Descriptor( + name='AgentActionProto', + full_name='communicator_objects.AgentActionProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='vector_actions', full_name='communicator_objects.AgentActionProto.vector_actions', index=0, + number=1, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + 
_descriptor.FieldDescriptor( + name='text_actions', full_name='communicator_objects.AgentActionProto.text_actions', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='memories', full_name='communicator_objects.AgentActionProto.memories', index=2, + number=3, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='value', full_name='communicator_objects.AgentActionProto.value', index=3, + number=4, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='custom_action', full_name='communicator_objects.AgentActionProto.custom_action', index=4, + number=5, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=136, + serialized_end=297, +) + +_AGENTACTIONPROTO.fields_by_name['custom_action'].message_type = mlagents_dot_envs_dot_communicator__objects_dot_custom__action__pb2._CUSTOMACTIONPROTO +DESCRIPTOR.message_types_by_name['AgentActionProto'] = _AGENTACTIONPROTO +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +AgentActionProto = _reflection.GeneratedProtocolMessageType('AgentActionProto', (_message.Message,), 
dict( + DESCRIPTOR = _AGENTACTIONPROTO, + __module__ = 'mlagents.envs.communicator_objects.agent_action_pb2' + # @@protoc_insertion_point(class_scope:communicator_objects.AgentActionProto) + )) +_sym_db.RegisterMessage(AgentActionProto) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\252\002\034MLAgents.CommunicatorObjects')) +# @@protoc_insertion_point(module_scope) diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/agent_action_proto_pb2.pyi b/ml-agents-envs/mlagents/envs/communicator_objects/agent_action_pb2.pyi similarity index 57% rename from ml-agents-envs/mlagents/envs/communicator_objects/agent_action_proto_pb2.pyi rename to ml-agents-envs/mlagents/envs/communicator_objects/agent_action_pb2.pyi index 4df09226eb..d96652aee0 100644 --- a/ml-agents-envs/mlagents/envs/communicator_objects/agent_action_proto_pb2.pyi +++ b/ml-agents-envs/mlagents/envs/communicator_objects/agent_action_pb2.pyi @@ -1,5 +1,9 @@ # @generated by generate_proto_mypy_stubs.py. Do not edit! import sys +from google.protobuf.descriptor import ( + Descriptor as google___protobuf___descriptor___Descriptor, +) + from google.protobuf.internal.containers import ( RepeatedScalarFieldContainer as google___protobuf___internal___containers___RepeatedScalarFieldContainer, ) @@ -9,7 +13,7 @@ from google.protobuf.message import ( ) from mlagents.envs.communicator_objects.custom_action_pb2 import ( - CustomAction as mlagents___envs___communicator_objects___custom_action_pb2___CustomAction, + CustomActionProto as mlagents___envs___communicator_objects___custom_action_pb2___CustomActionProto, ) from typing import ( @@ -23,29 +27,37 @@ from typing_extensions import ( ) +builtin___bool = bool +builtin___bytes = bytes +builtin___float = float +builtin___int = int + + class AgentActionProto(google___protobuf___message___Message): - vector_actions = ... 
# type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[float] + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + vector_actions = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[builtin___float] text_actions = ... # type: typing___Text - memories = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[float] - value = ... # type: float + memories = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[builtin___float] + value = ... # type: builtin___float @property - def custom_action(self) -> mlagents___envs___communicator_objects___custom_action_pb2___CustomAction: ... + def custom_action(self) -> mlagents___envs___communicator_objects___custom_action_pb2___CustomActionProto: ... def __init__(self, - vector_actions : typing___Optional[typing___Iterable[float]] = None, + *, + vector_actions : typing___Optional[typing___Iterable[builtin___float]] = None, text_actions : typing___Optional[typing___Text] = None, - memories : typing___Optional[typing___Iterable[float]] = None, - value : typing___Optional[float] = None, - custom_action : typing___Optional[mlagents___envs___communicator_objects___custom_action_pb2___CustomAction] = None, + memories : typing___Optional[typing___Iterable[builtin___float]] = None, + value : typing___Optional[builtin___float] = None, + custom_action : typing___Optional[mlagents___envs___communicator_objects___custom_action_pb2___CustomActionProto] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> AgentActionProto: ... + def FromString(cls, s: builtin___bytes) -> AgentActionProto: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"custom_action"]) -> bool: ... 
+ def HasField(self, field_name: typing_extensions___Literal[u"custom_action"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"custom_action",u"memories",u"text_actions",u"value",u"vector_actions"]) -> None: ... else: - def HasField(self, field_name: typing_extensions___Literal[u"custom_action",b"custom_action"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[b"custom_action",b"memories",b"text_actions",b"value",b"vector_actions"]) -> None: ... + def HasField(self, field_name: typing_extensions___Literal[u"custom_action",b"custom_action"]) -> builtin___bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"custom_action",b"custom_action",u"memories",b"memories",u"text_actions",b"text_actions",u"value",b"value",u"vector_actions",b"vector_actions"]) -> None: ... diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/agent_action_proto_pb2.py b/ml-agents-envs/mlagents/envs/communicator_objects/agent_action_proto_pb2.py deleted file mode 100644 index fe8be9f0b6..0000000000 --- a/ml-agents-envs/mlagents/envs/communicator_objects/agent_action_proto_pb2.py +++ /dev/null @@ -1,168 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: mlagents/envs/communicator_objects/agent_action_proto.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from mlagents.envs.communicator_objects import ( - custom_action_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_custom__action__pb2, -) - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="mlagents/envs/communicator_objects/agent_action_proto.proto", - package="communicator_objects", - syntax="proto3", - serialized_options=_b("\252\002\034MLAgents.CommunicatorObjects"), - serialized_pb=_b( - '\n;mlagents/envs/communicator_objects/agent_action_proto.proto\x12\x14\x63ommunicator_objects\x1a\x36mlagents/envs/communicator_objects/custom_action.proto"\x9c\x01\n\x10\x41gentActionProto\x12\x16\n\x0evector_actions\x18\x01 \x03(\x02\x12\x14\n\x0ctext_actions\x18\x02 \x01(\t\x12\x10\n\x08memories\x18\x03 \x03(\x02\x12\r\n\x05value\x18\x04 \x01(\x02\x12\x39\n\rcustom_action\x18\x05 \x01(\x0b\x32".communicator_objects.CustomActionB\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3' - ), - dependencies=[ - mlagents_dot_envs_dot_communicator__objects_dot_custom__action__pb2.DESCRIPTOR - ], -) - - -_AGENTACTIONPROTO = _descriptor.Descriptor( - name="AgentActionProto", - full_name="communicator_objects.AgentActionProto", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="vector_actions", - full_name="communicator_objects.AgentActionProto.vector_actions", - index=0, - number=1, - type=2, - cpp_type=6, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, 
- extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="text_actions", - full_name="communicator_objects.AgentActionProto.text_actions", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="memories", - full_name="communicator_objects.AgentActionProto.memories", - index=2, - number=3, - type=2, - cpp_type=6, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="communicator_objects.AgentActionProto.value", - index=3, - number=4, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="custom_action", - full_name="communicator_objects.AgentActionProto.custom_action", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=142, - serialized_end=298, -) - -_AGENTACTIONPROTO.fields_by_name[ - "custom_action" -].message_type = ( - mlagents_dot_envs_dot_communicator__objects_dot_custom__action__pb2._CUSTOMACTION -) 
-DESCRIPTOR.message_types_by_name["AgentActionProto"] = _AGENTACTIONPROTO -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -AgentActionProto = _reflection.GeneratedProtocolMessageType( - "AgentActionProto", - (_message.Message,), - dict( - DESCRIPTOR=_AGENTACTIONPROTO, - __module__="mlagents.envs.communicator_objects.agent_action_proto_pb2" - # @@protoc_insertion_point(class_scope:communicator_objects.AgentActionProto) - ), -) -_sym_db.RegisterMessage(AgentActionProto) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/agent_info_pb2.py b/ml-agents-envs/mlagents/envs/communicator_objects/agent_info_pb2.py new file mode 100644 index 0000000000..8818a369fd --- /dev/null +++ b/ml-agents-envs/mlagents/envs/communicator_objects/agent_info_pb2.py @@ -0,0 +1,153 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: mlagents/envs/communicator_objects/agent_info.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from mlagents.envs.communicator_objects import compressed_observation_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_compressed__observation__pb2 +from mlagents.envs.communicator_objects import custom_observation_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_custom__observation__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='mlagents/envs/communicator_objects/agent_info.proto', + package='communicator_objects', + syntax='proto3', + 
serialized_pb=_b('\n3mlagents/envs/communicator_objects/agent_info.proto\x12\x14\x63ommunicator_objects\x1a?mlagents/envs/communicator_objects/compressed_observation.proto\x1a;mlagents/envs/communicator_objects/custom_observation.proto\"\x98\x03\n\x0e\x41gentInfoProto\x12\"\n\x1astacked_vector_observation\x18\x01 \x03(\x02\x12\x18\n\x10text_observation\x18\x03 \x01(\t\x12\x1d\n\x15stored_vector_actions\x18\x04 \x03(\x02\x12\x1b\n\x13stored_text_actions\x18\x05 \x01(\t\x12\x10\n\x08memories\x18\x06 \x03(\x02\x12\x0e\n\x06reward\x18\x07 \x01(\x02\x12\x0c\n\x04\x64one\x18\x08 \x01(\x08\x12\x18\n\x10max_step_reached\x18\t \x01(\x08\x12\n\n\x02id\x18\n \x01(\x05\x12\x13\n\x0b\x61\x63tion_mask\x18\x0b \x03(\x08\x12H\n\x12\x63ustom_observation\x18\x0c \x01(\x0b\x32,.communicator_objects.CustomObservationProto\x12Q\n\x17\x63ompressed_observations\x18\r \x03(\x0b\x32\x30.communicator_objects.CompressedObservationProtoJ\x04\x08\x02\x10\x03\x42\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3') + , + dependencies=[mlagents_dot_envs_dot_communicator__objects_dot_compressed__observation__pb2.DESCRIPTOR,mlagents_dot_envs_dot_communicator__objects_dot_custom__observation__pb2.DESCRIPTOR,]) + + + + +_AGENTINFOPROTO = _descriptor.Descriptor( + name='AgentInfoProto', + full_name='communicator_objects.AgentInfoProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='stacked_vector_observation', full_name='communicator_objects.AgentInfoProto.stacked_vector_observation', index=0, + number=1, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='text_observation', full_name='communicator_objects.AgentInfoProto.text_observation', index=1, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, 
default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='stored_vector_actions', full_name='communicator_objects.AgentInfoProto.stored_vector_actions', index=2, + number=4, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='stored_text_actions', full_name='communicator_objects.AgentInfoProto.stored_text_actions', index=3, + number=5, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='memories', full_name='communicator_objects.AgentInfoProto.memories', index=4, + number=6, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='reward', full_name='communicator_objects.AgentInfoProto.reward', index=5, + number=7, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='done', full_name='communicator_objects.AgentInfoProto.done', index=6, + number=8, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='max_step_reached', 
full_name='communicator_objects.AgentInfoProto.max_step_reached', index=7, + number=9, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='id', full_name='communicator_objects.AgentInfoProto.id', index=8, + number=10, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='action_mask', full_name='communicator_objects.AgentInfoProto.action_mask', index=9, + number=11, type=8, cpp_type=7, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='custom_observation', full_name='communicator_objects.AgentInfoProto.custom_observation', index=10, + number=12, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='compressed_observations', full_name='communicator_objects.AgentInfoProto.compressed_observations', index=11, + number=13, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=204, + serialized_end=612, +) + +_AGENTINFOPROTO.fields_by_name['custom_observation'].message_type = 
mlagents_dot_envs_dot_communicator__objects_dot_custom__observation__pb2._CUSTOMOBSERVATIONPROTO +_AGENTINFOPROTO.fields_by_name['compressed_observations'].message_type = mlagents_dot_envs_dot_communicator__objects_dot_compressed__observation__pb2._COMPRESSEDOBSERVATIONPROTO +DESCRIPTOR.message_types_by_name['AgentInfoProto'] = _AGENTINFOPROTO +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +AgentInfoProto = _reflection.GeneratedProtocolMessageType('AgentInfoProto', (_message.Message,), dict( + DESCRIPTOR = _AGENTINFOPROTO, + __module__ = 'mlagents.envs.communicator_objects.agent_info_pb2' + # @@protoc_insertion_point(class_scope:communicator_objects.AgentInfoProto) + )) +_sym_db.RegisterMessage(AgentInfoProto) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\252\002\034MLAgents.CommunicatorObjects')) +# @@protoc_insertion_point(module_scope) diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/agent_info_pb2.pyi b/ml-agents-envs/mlagents/envs/communicator_objects/agent_info_pb2.pyi new file mode 100644 index 0000000000..4aaa82c3a7 --- /dev/null +++ b/ml-agents-envs/mlagents/envs/communicator_objects/agent_info_pb2.pyi @@ -0,0 +1,84 @@ +# @generated by generate_proto_mypy_stubs.py. Do not edit! 
+import sys +from google.protobuf.descriptor import ( + Descriptor as google___protobuf___descriptor___Descriptor, +) + +from google.protobuf.internal.containers import ( + RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer, + RepeatedScalarFieldContainer as google___protobuf___internal___containers___RepeatedScalarFieldContainer, +) + +from google.protobuf.message import ( + Message as google___protobuf___message___Message, +) + +from mlagents.envs.communicator_objects.compressed_observation_pb2 import ( + CompressedObservationProto as mlagents___envs___communicator_objects___compressed_observation_pb2___CompressedObservationProto, +) + +from mlagents.envs.communicator_objects.custom_observation_pb2 import ( + CustomObservationProto as mlagents___envs___communicator_objects___custom_observation_pb2___CustomObservationProto, +) + +from typing import ( + Iterable as typing___Iterable, + Optional as typing___Optional, + Text as typing___Text, +) + +from typing_extensions import ( + Literal as typing_extensions___Literal, +) + + +builtin___bool = bool +builtin___bytes = bytes +builtin___float = float +builtin___int = int + + +class AgentInfoProto(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + stacked_vector_observation = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[builtin___float] + text_observation = ... # type: typing___Text + stored_vector_actions = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[builtin___float] + stored_text_actions = ... # type: typing___Text + memories = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[builtin___float] + reward = ... # type: builtin___float + done = ... # type: builtin___bool + max_step_reached = ... # type: builtin___bool + id = ... # type: builtin___int + action_mask = ... 
# type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[builtin___bool] + + @property + def custom_observation(self) -> mlagents___envs___communicator_objects___custom_observation_pb2___CustomObservationProto: ... + + @property + def compressed_observations(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[mlagents___envs___communicator_objects___compressed_observation_pb2___CompressedObservationProto]: ... + + def __init__(self, + *, + stacked_vector_observation : typing___Optional[typing___Iterable[builtin___float]] = None, + text_observation : typing___Optional[typing___Text] = None, + stored_vector_actions : typing___Optional[typing___Iterable[builtin___float]] = None, + stored_text_actions : typing___Optional[typing___Text] = None, + memories : typing___Optional[typing___Iterable[builtin___float]] = None, + reward : typing___Optional[builtin___float] = None, + done : typing___Optional[builtin___bool] = None, + max_step_reached : typing___Optional[builtin___bool] = None, + id : typing___Optional[builtin___int] = None, + action_mask : typing___Optional[typing___Iterable[builtin___bool]] = None, + custom_observation : typing___Optional[mlagents___envs___communicator_objects___custom_observation_pb2___CustomObservationProto] = None, + compressed_observations : typing___Optional[typing___Iterable[mlagents___envs___communicator_objects___compressed_observation_pb2___CompressedObservationProto]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: builtin___bytes) -> AgentInfoProto: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"custom_observation"]) -> builtin___bool: ... 
+ def ClearField(self, field_name: typing_extensions___Literal[u"action_mask",u"compressed_observations",u"custom_observation",u"done",u"id",u"max_step_reached",u"memories",u"reward",u"stacked_vector_observation",u"stored_text_actions",u"stored_vector_actions",u"text_observation"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"custom_observation",b"custom_observation"]) -> builtin___bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"action_mask",b"action_mask",u"compressed_observations",b"compressed_observations",u"custom_observation",b"custom_observation",u"done",b"done",u"id",b"id",u"max_step_reached",b"max_step_reached",u"memories",b"memories",u"reward",b"reward",u"stacked_vector_observation",b"stacked_vector_observation",u"stored_text_actions",b"stored_text_actions",u"stored_vector_actions",b"stored_vector_actions",u"text_observation",b"text_observation"]) -> None: ... diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/agent_info_proto_pb2.py b/ml-agents-envs/mlagents/envs/communicator_objects/agent_info_proto_pb2.py deleted file mode 100644 index 17859abe0e..0000000000 --- a/ml-agents-envs/mlagents/envs/communicator_objects/agent_info_proto_pb2.py +++ /dev/null @@ -1,294 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: mlagents/envs/communicator_objects/agent_info_proto.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from mlagents.envs.communicator_objects import ( - custom_observation_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_custom__observation__pb2, -) - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="mlagents/envs/communicator_objects/agent_info_proto.proto", - package="communicator_objects", - syntax="proto3", - serialized_options=_b("\252\002\034MLAgents.CommunicatorObjects"), - serialized_pb=_b( - '\n9mlagents/envs/communicator_objects/agent_info_proto.proto\x12\x14\x63ommunicator_objects\x1a;mlagents/envs/communicator_objects/custom_observation.proto"\xd7\x02\n\x0e\x41gentInfoProto\x12"\n\x1astacked_vector_observation\x18\x01 \x03(\x02\x12\x1b\n\x13visual_observations\x18\x02 \x03(\x0c\x12\x18\n\x10text_observation\x18\x03 \x01(\t\x12\x1d\n\x15stored_vector_actions\x18\x04 \x03(\x02\x12\x1b\n\x13stored_text_actions\x18\x05 \x01(\t\x12\x10\n\x08memories\x18\x06 \x03(\x02\x12\x0e\n\x06reward\x18\x07 \x01(\x02\x12\x0c\n\x04\x64one\x18\x08 \x01(\x08\x12\x18\n\x10max_step_reached\x18\t \x01(\x08\x12\n\n\x02id\x18\n \x01(\x05\x12\x13\n\x0b\x61\x63tion_mask\x18\x0b \x03(\x08\x12\x43\n\x12\x63ustom_observation\x18\x0c \x01(\x0b\x32\'.communicator_objects.CustomObservationB\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3' - ), - dependencies=[ - mlagents_dot_envs_dot_communicator__objects_dot_custom__observation__pb2.DESCRIPTOR - ], -) - - -_AGENTINFOPROTO = _descriptor.Descriptor( - name="AgentInfoProto", - full_name="communicator_objects.AgentInfoProto", - filename=None, - 
file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="stacked_vector_observation", - full_name="communicator_objects.AgentInfoProto.stacked_vector_observation", - index=0, - number=1, - type=2, - cpp_type=6, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="visual_observations", - full_name="communicator_objects.AgentInfoProto.visual_observations", - index=1, - number=2, - type=12, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="text_observation", - full_name="communicator_objects.AgentInfoProto.text_observation", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="stored_vector_actions", - full_name="communicator_objects.AgentInfoProto.stored_vector_actions", - index=3, - number=4, - type=2, - cpp_type=6, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="stored_text_actions", - full_name="communicator_objects.AgentInfoProto.stored_text_actions", - index=4, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - 
is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="memories", - full_name="communicator_objects.AgentInfoProto.memories", - index=5, - number=6, - type=2, - cpp_type=6, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="reward", - full_name="communicator_objects.AgentInfoProto.reward", - index=6, - number=7, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="done", - full_name="communicator_objects.AgentInfoProto.done", - index=7, - number=8, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="max_step_reached", - full_name="communicator_objects.AgentInfoProto.max_step_reached", - index=8, - number=9, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="id", - full_name="communicator_objects.AgentInfoProto.id", - index=9, - number=10, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - 
_descriptor.FieldDescriptor( - name="action_mask", - full_name="communicator_objects.AgentInfoProto.action_mask", - index=10, - number=11, - type=8, - cpp_type=7, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="custom_observation", - full_name="communicator_objects.AgentInfoProto.custom_observation", - index=11, - number=12, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=145, - serialized_end=488, -) - -_AGENTINFOPROTO.fields_by_name[ - "custom_observation" -].message_type = ( - mlagents_dot_envs_dot_communicator__objects_dot_custom__observation__pb2._CUSTOMOBSERVATION -) -DESCRIPTOR.message_types_by_name["AgentInfoProto"] = _AGENTINFOPROTO -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -AgentInfoProto = _reflection.GeneratedProtocolMessageType( - "AgentInfoProto", - (_message.Message,), - dict( - DESCRIPTOR=_AGENTINFOPROTO, - __module__="mlagents.envs.communicator_objects.agent_info_proto_pb2" - # @@protoc_insertion_point(class_scope:communicator_objects.AgentInfoProto) - ), -) -_sym_db.RegisterMessage(AgentInfoProto) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/agent_info_proto_pb2.pyi b/ml-agents-envs/mlagents/envs/communicator_objects/agent_info_proto_pb2.pyi deleted file mode 100644 index 595aa70ef5..0000000000 --- a/ml-agents-envs/mlagents/envs/communicator_objects/agent_info_proto_pb2.pyi 
+++ /dev/null @@ -1,65 +0,0 @@ -# @generated by generate_proto_mypy_stubs.py. Do not edit! -import sys -from google.protobuf.internal.containers import ( - RepeatedScalarFieldContainer as google___protobuf___internal___containers___RepeatedScalarFieldContainer, -) - -from google.protobuf.message import ( - Message as google___protobuf___message___Message, -) - -from mlagents.envs.communicator_objects.custom_observation_pb2 import ( - CustomObservation as mlagents___envs___communicator_objects___custom_observation_pb2___CustomObservation, -) - -from typing import ( - Iterable as typing___Iterable, - Optional as typing___Optional, - Text as typing___Text, -) - -from typing_extensions import ( - Literal as typing_extensions___Literal, -) - - -class AgentInfoProto(google___protobuf___message___Message): - stacked_vector_observation = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[float] - visual_observations = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[bytes] - text_observation = ... # type: typing___Text - stored_vector_actions = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[float] - stored_text_actions = ... # type: typing___Text - memories = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[float] - reward = ... # type: float - done = ... # type: bool - max_step_reached = ... # type: bool - id = ... # type: int - action_mask = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[bool] - - @property - def custom_observation(self) -> mlagents___envs___communicator_objects___custom_observation_pb2___CustomObservation: ... 
- - def __init__(self, - stacked_vector_observation : typing___Optional[typing___Iterable[float]] = None, - visual_observations : typing___Optional[typing___Iterable[bytes]] = None, - text_observation : typing___Optional[typing___Text] = None, - stored_vector_actions : typing___Optional[typing___Iterable[float]] = None, - stored_text_actions : typing___Optional[typing___Text] = None, - memories : typing___Optional[typing___Iterable[float]] = None, - reward : typing___Optional[float] = None, - done : typing___Optional[bool] = None, - max_step_reached : typing___Optional[bool] = None, - id : typing___Optional[int] = None, - action_mask : typing___Optional[typing___Iterable[bool]] = None, - custom_observation : typing___Optional[mlagents___envs___communicator_objects___custom_observation_pb2___CustomObservation] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> AgentInfoProto: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"custom_observation"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"action_mask",u"custom_observation",u"done",u"id",u"max_step_reached",u"memories",u"reward",u"stacked_vector_observation",u"stored_text_actions",u"stored_vector_actions",u"text_observation",u"visual_observations"]) -> None: ... - else: - def HasField(self, field_name: typing_extensions___Literal[u"custom_observation",b"custom_observation"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[b"action_mask",b"custom_observation",b"done",b"id",b"max_step_reached",b"memories",b"reward",b"stacked_vector_observation",b"stored_text_actions",b"stored_vector_actions",b"text_observation",b"visual_observations"]) -> None: ... 
diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/brain_parameters_pb2.py b/ml-agents-envs/mlagents/envs/communicator_objects/brain_parameters_pb2.py new file mode 100644 index 0000000000..31007b6869 --- /dev/null +++ b/ml-agents-envs/mlagents/envs/communicator_objects/brain_parameters_pb2.py @@ -0,0 +1,116 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: mlagents/envs/communicator_objects/brain_parameters.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from mlagents.envs.communicator_objects import space_type_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_space__type__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='mlagents/envs/communicator_objects/brain_parameters.proto', + package='communicator_objects', + syntax='proto3', + serialized_pb=_b('\n9mlagents/envs/communicator_objects/brain_parameters.proto\x12\x14\x63ommunicator_objects\x1a\x33mlagents/envs/communicator_objects/space_type.proto\"\x97\x02\n\x14\x42rainParametersProto\x12\x1f\n\x17vector_observation_size\x18\x01 \x01(\x05\x12\'\n\x1fnum_stacked_vector_observations\x18\x02 \x01(\x05\x12\x1a\n\x12vector_action_size\x18\x03 \x03(\x05\x12\"\n\x1avector_action_descriptions\x18\x05 \x03(\t\x12\x46\n\x18vector_action_space_type\x18\x06 \x01(\x0e\x32$.communicator_objects.SpaceTypeProto\x12\x12\n\nbrain_name\x18\x07 \x01(\t\x12\x13\n\x0bis_training\x18\x08 \x01(\x08J\x04\x08\x04\x10\x05\x42\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3') + , + dependencies=[mlagents_dot_envs_dot_communicator__objects_dot_space__type__pb2.DESCRIPTOR,]) + + + + 
+_BRAINPARAMETERSPROTO = _descriptor.Descriptor( + name='BrainParametersProto', + full_name='communicator_objects.BrainParametersProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='vector_observation_size', full_name='communicator_objects.BrainParametersProto.vector_observation_size', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='num_stacked_vector_observations', full_name='communicator_objects.BrainParametersProto.num_stacked_vector_observations', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='vector_action_size', full_name='communicator_objects.BrainParametersProto.vector_action_size', index=2, + number=3, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='vector_action_descriptions', full_name='communicator_objects.BrainParametersProto.vector_action_descriptions', index=3, + number=5, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='vector_action_space_type', full_name='communicator_objects.BrainParametersProto.vector_action_space_type', index=4, + number=6, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='brain_name', full_name='communicator_objects.BrainParametersProto.brain_name', index=5, + number=7, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='is_training', full_name='communicator_objects.BrainParametersProto.is_training', index=6, + number=8, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=137, + serialized_end=416, +) + +_BRAINPARAMETERSPROTO.fields_by_name['vector_action_space_type'].enum_type = mlagents_dot_envs_dot_communicator__objects_dot_space__type__pb2._SPACETYPEPROTO +DESCRIPTOR.message_types_by_name['BrainParametersProto'] = _BRAINPARAMETERSPROTO +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +BrainParametersProto = _reflection.GeneratedProtocolMessageType('BrainParametersProto', (_message.Message,), dict( + DESCRIPTOR = _BRAINPARAMETERSPROTO, + __module__ = 'mlagents.envs.communicator_objects.brain_parameters_pb2' + # @@protoc_insertion_point(class_scope:communicator_objects.BrainParametersProto) + )) +_sym_db.RegisterMessage(BrainParametersProto) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\252\002\034MLAgents.CommunicatorObjects')) +# @@protoc_insertion_point(module_scope) diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/brain_parameters_pb2.pyi 
b/ml-agents-envs/mlagents/envs/communicator_objects/brain_parameters_pb2.pyi new file mode 100644 index 0000000000..f3d1c30597 --- /dev/null +++ b/ml-agents-envs/mlagents/envs/communicator_objects/brain_parameters_pb2.pyi @@ -0,0 +1,63 @@ +# @generated by generate_proto_mypy_stubs.py. Do not edit! +import sys +from google.protobuf.descriptor import ( + Descriptor as google___protobuf___descriptor___Descriptor, +) + +from google.protobuf.internal.containers import ( + RepeatedScalarFieldContainer as google___protobuf___internal___containers___RepeatedScalarFieldContainer, +) + +from google.protobuf.message import ( + Message as google___protobuf___message___Message, +) + +from mlagents.envs.communicator_objects.space_type_pb2 import ( + SpaceTypeProto as mlagents___envs___communicator_objects___space_type_pb2___SpaceTypeProto, +) + +from typing import ( + Iterable as typing___Iterable, + Optional as typing___Optional, + Text as typing___Text, +) + +from typing_extensions import ( + Literal as typing_extensions___Literal, +) + + +builtin___bool = bool +builtin___bytes = bytes +builtin___float = float +builtin___int = int + + +class BrainParametersProto(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + vector_observation_size = ... # type: builtin___int + num_stacked_vector_observations = ... # type: builtin___int + vector_action_size = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[builtin___int] + vector_action_descriptions = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] + vector_action_space_type = ... # type: mlagents___envs___communicator_objects___space_type_pb2___SpaceTypeProto + brain_name = ... # type: typing___Text + is_training = ... 
# type: builtin___bool + + def __init__(self, + *, + vector_observation_size : typing___Optional[builtin___int] = None, + num_stacked_vector_observations : typing___Optional[builtin___int] = None, + vector_action_size : typing___Optional[typing___Iterable[builtin___int]] = None, + vector_action_descriptions : typing___Optional[typing___Iterable[typing___Text]] = None, + vector_action_space_type : typing___Optional[mlagents___envs___communicator_objects___space_type_pb2___SpaceTypeProto] = None, + brain_name : typing___Optional[typing___Text] = None, + is_training : typing___Optional[builtin___bool] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: builtin___bytes) -> BrainParametersProto: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"brain_name",u"is_training",u"num_stacked_vector_observations",u"vector_action_descriptions",u"vector_action_size",u"vector_action_space_type",u"vector_observation_size"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[u"brain_name",b"brain_name",u"is_training",b"is_training",u"num_stacked_vector_observations",b"num_stacked_vector_observations",u"vector_action_descriptions",b"vector_action_descriptions",u"vector_action_size",b"vector_action_size",u"vector_action_space_type",b"vector_action_space_type",u"vector_observation_size",b"vector_observation_size"]) -> None: ... 
diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/brain_parameters_proto_pb2.py b/ml-agents-envs/mlagents/envs/communicator_objects/brain_parameters_proto_pb2.py deleted file mode 100644 index bb2c4b650c..0000000000 --- a/ml-agents-envs/mlagents/envs/communicator_objects/brain_parameters_proto_pb2.py +++ /dev/null @@ -1,231 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: mlagents/envs/communicator_objects/brain_parameters_proto.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from mlagents.envs.communicator_objects import ( - resolution_proto_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_resolution__proto__pb2, -) -from mlagents.envs.communicator_objects import ( - space_type_proto_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_space__type__proto__pb2, -) - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="mlagents/envs/communicator_objects/brain_parameters_proto.proto", - package="communicator_objects", - syntax="proto3", - serialized_options=_b("\252\002\034MLAgents.CommunicatorObjects"), - serialized_pb=_b( - '\n?mlagents/envs/communicator_objects/brain_parameters_proto.proto\x12\x14\x63ommunicator_objects\x1a\x39mlagents/envs/communicator_objects/resolution_proto.proto\x1a\x39mlagents/envs/communicator_objects/space_type_proto.proto"\xd4\x02\n\x14\x42rainParametersProto\x12\x1f\n\x17vector_observation_size\x18\x01 \x01(\x05\x12\'\n\x1fnum_stacked_vector_observations\x18\x02 \x01(\x05\x12\x1a\n\x12vector_action_size\x18\x03 \x03(\x05\x12\x41\n\x12\x63\x61mera_resolutions\x18\x04 
\x03(\x0b\x32%.communicator_objects.ResolutionProto\x12"\n\x1avector_action_descriptions\x18\x05 \x03(\t\x12\x46\n\x18vector_action_space_type\x18\x06 \x01(\x0e\x32$.communicator_objects.SpaceTypeProto\x12\x12\n\nbrain_name\x18\x07 \x01(\t\x12\x13\n\x0bis_training\x18\x08 \x01(\x08\x42\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3' - ), - dependencies=[ - mlagents_dot_envs_dot_communicator__objects_dot_resolution__proto__pb2.DESCRIPTOR, - mlagents_dot_envs_dot_communicator__objects_dot_space__type__proto__pb2.DESCRIPTOR, - ], -) - - -_BRAINPARAMETERSPROTO = _descriptor.Descriptor( - name="BrainParametersProto", - full_name="communicator_objects.BrainParametersProto", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="vector_observation_size", - full_name="communicator_objects.BrainParametersProto.vector_observation_size", - index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="num_stacked_vector_observations", - full_name="communicator_objects.BrainParametersProto.num_stacked_vector_observations", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="vector_action_size", - full_name="communicator_objects.BrainParametersProto.vector_action_size", - index=2, - number=3, - type=5, - cpp_type=1, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - 
_descriptor.FieldDescriptor( - name="camera_resolutions", - full_name="communicator_objects.BrainParametersProto.camera_resolutions", - index=3, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="vector_action_descriptions", - full_name="communicator_objects.BrainParametersProto.vector_action_descriptions", - index=4, - number=5, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="vector_action_space_type", - full_name="communicator_objects.BrainParametersProto.vector_action_space_type", - index=5, - number=6, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="brain_name", - full_name="communicator_objects.BrainParametersProto.brain_name", - index=6, - number=7, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="is_training", - full_name="communicator_objects.BrainParametersProto.is_training", - index=7, - number=8, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=208, - serialized_end=548, -) - -_BRAINPARAMETERSPROTO.fields_by_name[ - "camera_resolutions" -].message_type = ( - mlagents_dot_envs_dot_communicator__objects_dot_resolution__proto__pb2._RESOLUTIONPROTO -) -_BRAINPARAMETERSPROTO.fields_by_name[ - "vector_action_space_type" -].enum_type = ( - mlagents_dot_envs_dot_communicator__objects_dot_space__type__proto__pb2._SPACETYPEPROTO -) -DESCRIPTOR.message_types_by_name["BrainParametersProto"] = _BRAINPARAMETERSPROTO -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -BrainParametersProto = _reflection.GeneratedProtocolMessageType( - "BrainParametersProto", - (_message.Message,), - dict( - DESCRIPTOR=_BRAINPARAMETERSPROTO, - __module__="mlagents.envs.communicator_objects.brain_parameters_proto_pb2" - # @@protoc_insertion_point(class_scope:communicator_objects.BrainParametersProto) - ), -) -_sym_db.RegisterMessage(BrainParametersProto) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/brain_parameters_proto_pb2.pyi b/ml-agents-envs/mlagents/envs/communicator_objects/brain_parameters_proto_pb2.pyi deleted file mode 100644 index ce3d0da775..0000000000 --- a/ml-agents-envs/mlagents/envs/communicator_objects/brain_parameters_proto_pb2.pyi +++ /dev/null @@ -1,60 +0,0 @@ -# @generated by generate_proto_mypy_stubs.py. Do not edit! 
-import sys -from google.protobuf.internal.containers import ( - RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer, - RepeatedScalarFieldContainer as google___protobuf___internal___containers___RepeatedScalarFieldContainer, -) - -from google.protobuf.message import ( - Message as google___protobuf___message___Message, -) - -from mlagents.envs.communicator_objects.resolution_proto_pb2 import ( - ResolutionProto as mlagents___envs___communicator_objects___resolution_proto_pb2___ResolutionProto, -) - -from mlagents.envs.communicator_objects.space_type_proto_pb2 import ( - SpaceTypeProto as mlagents___envs___communicator_objects___space_type_proto_pb2___SpaceTypeProto, -) - -from typing import ( - Iterable as typing___Iterable, - Optional as typing___Optional, - Text as typing___Text, -) - -from typing_extensions import ( - Literal as typing_extensions___Literal, -) - - -class BrainParametersProto(google___protobuf___message___Message): - vector_observation_size = ... # type: int - num_stacked_vector_observations = ... # type: int - vector_action_size = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[int] - vector_action_descriptions = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] - vector_action_space_type = ... # type: mlagents___envs___communicator_objects___space_type_proto_pb2___SpaceTypeProto - brain_name = ... # type: typing___Text - is_training = ... # type: bool - - @property - def camera_resolutions(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[mlagents___envs___communicator_objects___resolution_proto_pb2___ResolutionProto]: ... 
- - def __init__(self, - vector_observation_size : typing___Optional[int] = None, - num_stacked_vector_observations : typing___Optional[int] = None, - vector_action_size : typing___Optional[typing___Iterable[int]] = None, - camera_resolutions : typing___Optional[typing___Iterable[mlagents___envs___communicator_objects___resolution_proto_pb2___ResolutionProto]] = None, - vector_action_descriptions : typing___Optional[typing___Iterable[typing___Text]] = None, - vector_action_space_type : typing___Optional[mlagents___envs___communicator_objects___space_type_proto_pb2___SpaceTypeProto] = None, - brain_name : typing___Optional[typing___Text] = None, - is_training : typing___Optional[bool] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> BrainParametersProto: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"brain_name",u"camera_resolutions",u"is_training",u"num_stacked_vector_observations",u"vector_action_descriptions",u"vector_action_size",u"vector_action_space_type",u"vector_observation_size"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"brain_name",b"camera_resolutions",b"is_training",b"num_stacked_vector_observations",b"vector_action_descriptions",b"vector_action_size",b"vector_action_space_type",b"vector_observation_size"]) -> None: ... diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/command_pb2.py b/ml-agents-envs/mlagents/envs/communicator_objects/command_pb2.py new file mode 100644 index 0000000000..6355b8f4b2 --- /dev/null +++ b/ml-agents-envs/mlagents/envs/communicator_objects/command_pb2.py @@ -0,0 +1,64 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: mlagents/envs/communicator_objects/command.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='mlagents/envs/communicator_objects/command.proto', + package='communicator_objects', + syntax='proto3', + serialized_pb=_b('\n0mlagents/envs/communicator_objects/command.proto\x12\x14\x63ommunicator_objects*-\n\x0c\x43ommandProto\x12\x08\n\x04STEP\x10\x00\x12\t\n\x05RESET\x10\x01\x12\x08\n\x04QUIT\x10\x02\x42\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3') +) + +_COMMANDPROTO = _descriptor.EnumDescriptor( + name='CommandProto', + full_name='communicator_objects.CommandProto', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='STEP', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='RESET', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='QUIT', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=74, + serialized_end=119, +) +_sym_db.RegisterEnumDescriptor(_COMMANDPROTO) + +CommandProto = enum_type_wrapper.EnumTypeWrapper(_COMMANDPROTO) +STEP = 0 +RESET = 1 +QUIT = 2 + + +DESCRIPTOR.enum_types_by_name['CommandProto'] = _COMMANDPROTO +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\252\002\034MLAgents.CommunicatorObjects')) +# @@protoc_insertion_point(module_scope) 
diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/command_pb2.pyi b/ml-agents-envs/mlagents/envs/communicator_objects/command_pb2.pyi new file mode 100644 index 0000000000..4378a8a698 --- /dev/null +++ b/ml-agents-envs/mlagents/envs/communicator_objects/command_pb2.pyi @@ -0,0 +1,39 @@ +# @generated by generate_proto_mypy_stubs.py. Do not edit! +import sys +from google.protobuf.descriptor import ( + EnumDescriptor as google___protobuf___descriptor___EnumDescriptor, +) + +from google.protobuf.message import ( + Message as google___protobuf___message___Message, +) + +from typing import ( + List as typing___List, + Tuple as typing___Tuple, + cast as typing___cast, +) + + +builtin___int = int +builtin___str = str + + +class CommandProto(builtin___int): + DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ... + @classmethod + def Name(cls, number: builtin___int) -> builtin___str: ... + @classmethod + def Value(cls, name: builtin___str) -> 'CommandProto': ... + @classmethod + def keys(cls) -> typing___List[builtin___str]: ... + @classmethod + def values(cls) -> typing___List['CommandProto']: ... + @classmethod + def items(cls) -> typing___List[typing___Tuple[builtin___str, 'CommandProto']]: ... + STEP = typing___cast('CommandProto', 0) + RESET = typing___cast('CommandProto', 1) + QUIT = typing___cast('CommandProto', 2) +STEP = typing___cast('CommandProto', 0) +RESET = typing___cast('CommandProto', 1) +QUIT = typing___cast('CommandProto', 2) diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/command_proto_pb2.py b/ml-agents-envs/mlagents/envs/communicator_objects/command_proto_pb2.py deleted file mode 100644 index 9815698619..0000000000 --- a/ml-agents-envs/mlagents/envs/communicator_objects/command_proto_pb2.py +++ /dev/null @@ -1,63 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: mlagents/envs/communicator_objects/command_proto.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="mlagents/envs/communicator_objects/command_proto.proto", - package="communicator_objects", - syntax="proto3", - serialized_options=_b("\252\002\034MLAgents.CommunicatorObjects"), - serialized_pb=_b( - "\n6mlagents/envs/communicator_objects/command_proto.proto\x12\x14\x63ommunicator_objects*-\n\x0c\x43ommandProto\x12\x08\n\x04STEP\x10\x00\x12\t\n\x05RESET\x10\x01\x12\x08\n\x04QUIT\x10\x02\x42\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3" - ), -) - -_COMMANDPROTO = _descriptor.EnumDescriptor( - name="CommandProto", - full_name="communicator_objects.CommandProto", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="STEP", index=0, number=0, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="RESET", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="QUIT", index=2, number=2, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=80, - serialized_end=125, -) -_sym_db.RegisterEnumDescriptor(_COMMANDPROTO) - -CommandProto = enum_type_wrapper.EnumTypeWrapper(_COMMANDPROTO) -STEP = 0 -RESET = 1 -QUIT = 2 - - -DESCRIPTOR.enum_types_by_name["CommandProto"] = _COMMANDPROTO -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git 
a/ml-agents-envs/mlagents/envs/communicator_objects/command_proto_pb2.pyi b/ml-agents-envs/mlagents/envs/communicator_objects/command_proto_pb2.pyi deleted file mode 100644 index 7672e0ac55..0000000000 --- a/ml-agents-envs/mlagents/envs/communicator_objects/command_proto_pb2.pyi +++ /dev/null @@ -1,32 +0,0 @@ -# @generated by generate_proto_mypy_stubs.py. Do not edit! -import sys -from google.protobuf.descriptor import ( - EnumDescriptor as google___protobuf___descriptor___EnumDescriptor, -) - -from google.protobuf.message import ( - Message as google___protobuf___message___Message, -) - -from typing import ( - List as typing___List, - Tuple as typing___Tuple, - cast as typing___cast, -) - - -class CommandProto(int): - DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ... - @classmethod - def Name(cls, number: int) -> str: ... - @classmethod - def Value(cls, name: str) -> CommandProto: ... - @classmethod - def keys(cls) -> typing___List[str]: ... - @classmethod - def values(cls) -> typing___List[CommandProto]: ... - @classmethod - def items(cls) -> typing___List[typing___Tuple[str, CommandProto]]: ... -STEP = typing___cast(CommandProto, 0) -RESET = typing___cast(CommandProto, 1) -QUIT = typing___cast(CommandProto, 2) diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/compressed_observation_pb2.py b/ml-agents-envs/mlagents/envs/communicator_objects/compressed_observation_pb2.py new file mode 100644 index 0000000000..5ff3611f53 --- /dev/null +++ b/ml-agents-envs/mlagents/envs/communicator_objects/compressed_observation_pb2.py @@ -0,0 +1,113 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: mlagents/envs/communicator_objects/compressed_observation.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='mlagents/envs/communicator_objects/compressed_observation.proto', + package='communicator_objects', + syntax='proto3', + serialized_pb=_b('\n?mlagents/envs/communicator_objects/compressed_observation.proto\x12\x14\x63ommunicator_objects\"\x7f\n\x1a\x43ompressedObservationProto\x12\r\n\x05shape\x18\x01 \x03(\x05\x12\x44\n\x10\x63ompression_type\x18\x02 \x01(\x0e\x32*.communicator_objects.CompressionTypeProto\x12\x0c\n\x04\x64\x61ta\x18\x03 \x01(\x0c*)\n\x14\x43ompressionTypeProto\x12\x08\n\x04NONE\x10\x00\x12\x07\n\x03PNG\x10\x01\x42\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3') +) + +_COMPRESSIONTYPEPROTO = _descriptor.EnumDescriptor( + name='CompressionTypeProto', + full_name='communicator_objects.CompressionTypeProto', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='NONE', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PNG', index=1, number=1, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=218, + serialized_end=259, +) +_sym_db.RegisterEnumDescriptor(_COMPRESSIONTYPEPROTO) + +CompressionTypeProto = enum_type_wrapper.EnumTypeWrapper(_COMPRESSIONTYPEPROTO) +NONE = 0 +PNG = 1 + + + +_COMPRESSEDOBSERVATIONPROTO = _descriptor.Descriptor( + name='CompressedObservationProto', + 
full_name='communicator_objects.CompressedObservationProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='shape', full_name='communicator_objects.CompressedObservationProto.shape', index=0, + number=1, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='compression_type', full_name='communicator_objects.CompressedObservationProto.compression_type', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='data', full_name='communicator_objects.CompressedObservationProto.data', index=2, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=89, + serialized_end=216, +) + +_COMPRESSEDOBSERVATIONPROTO.fields_by_name['compression_type'].enum_type = _COMPRESSIONTYPEPROTO +DESCRIPTOR.message_types_by_name['CompressedObservationProto'] = _COMPRESSEDOBSERVATIONPROTO +DESCRIPTOR.enum_types_by_name['CompressionTypeProto'] = _COMPRESSIONTYPEPROTO +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +CompressedObservationProto = _reflection.GeneratedProtocolMessageType('CompressedObservationProto', (_message.Message,), dict( + DESCRIPTOR = _COMPRESSEDOBSERVATIONPROTO, + __module__ = 'mlagents.envs.communicator_objects.compressed_observation_pb2' + # 
@@protoc_insertion_point(class_scope:communicator_objects.CompressedObservationProto) + )) +_sym_db.RegisterMessage(CompressedObservationProto) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\252\002\034MLAgents.CommunicatorObjects')) +# @@protoc_insertion_point(module_scope) diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/compressed_observation_pb2.pyi b/ml-agents-envs/mlagents/envs/communicator_objects/compressed_observation_pb2.pyi new file mode 100644 index 0000000000..41a3e719e4 --- /dev/null +++ b/ml-agents-envs/mlagents/envs/communicator_objects/compressed_observation_pb2.pyi @@ -0,0 +1,72 @@ +# @generated by generate_proto_mypy_stubs.py. Do not edit! +import sys +from google.protobuf.descriptor import ( + Descriptor as google___protobuf___descriptor___Descriptor, + EnumDescriptor as google___protobuf___descriptor___EnumDescriptor, +) + +from google.protobuf.internal.containers import ( + RepeatedScalarFieldContainer as google___protobuf___internal___containers___RepeatedScalarFieldContainer, +) + +from google.protobuf.message import ( + Message as google___protobuf___message___Message, +) + +from typing import ( + Iterable as typing___Iterable, + List as typing___List, + Optional as typing___Optional, + Tuple as typing___Tuple, + cast as typing___cast, +) + +from typing_extensions import ( + Literal as typing_extensions___Literal, +) + + +builtin___bool = bool +builtin___bytes = bytes +builtin___float = float +builtin___int = int +builtin___str = str + + +class CompressionTypeProto(builtin___int): + DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ... + @classmethod + def Name(cls, number: builtin___int) -> builtin___str: ... + @classmethod + def Value(cls, name: builtin___str) -> 'CompressionTypeProto': ... + @classmethod + def keys(cls) -> typing___List[builtin___str]: ... + @classmethod + def values(cls) -> typing___List['CompressionTypeProto']: ... 
+ @classmethod + def items(cls) -> typing___List[typing___Tuple[builtin___str, 'CompressionTypeProto']]: ... + NONE = typing___cast('CompressionTypeProto', 0) + PNG = typing___cast('CompressionTypeProto', 1) +NONE = typing___cast('CompressionTypeProto', 0) +PNG = typing___cast('CompressionTypeProto', 1) + +class CompressedObservationProto(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + shape = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[builtin___int] + compression_type = ... # type: CompressionTypeProto + data = ... # type: builtin___bytes + + def __init__(self, + *, + shape : typing___Optional[typing___Iterable[builtin___int]] = None, + compression_type : typing___Optional[CompressionTypeProto] = None, + data : typing___Optional[builtin___bytes] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: builtin___bytes) -> CompressedObservationProto: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"compression_type",u"data",u"shape"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[u"compression_type",b"compression_type",u"data",b"data",u"shape",b"shape"]) -> None: ... diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/custom_action_pb2.py b/ml-agents-envs/mlagents/envs/communicator_objects/custom_action_pb2.py index 1c16809b0b..ecead71d76 100644 --- a/ml-agents-envs/mlagents/envs/communicator_objects/custom_action_pb2.py +++ b/ml-agents-envs/mlagents/envs/communicator_objects/custom_action_pb2.py @@ -1,64 +1,64 @@ -# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: mlagents/envs/communicator_objects/custom_action.proto import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database - +from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() + + DESCRIPTOR = _descriptor.FileDescriptor( - name="mlagents/envs/communicator_objects/custom_action.proto", - package="communicator_objects", - syntax="proto3", - serialized_options=_b("\252\002\034MLAgents.CommunicatorObjects"), - serialized_pb=_b( - '\n6mlagents/envs/communicator_objects/custom_action.proto\x12\x14\x63ommunicator_objects"\x0e\n\x0c\x43ustomActionB\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3' - ), + name='mlagents/envs/communicator_objects/custom_action.proto', + package='communicator_objects', + syntax='proto3', + serialized_pb=_b('\n6mlagents/envs/communicator_objects/custom_action.proto\x12\x14\x63ommunicator_objects\"\x13\n\x11\x43ustomActionProtoB\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3') ) -_CUSTOMACTION = _descriptor.Descriptor( - name="CustomAction", - full_name="communicator_objects.CustomAction", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=80, - serialized_end=94, + + +_CUSTOMACTIONPROTO = _descriptor.Descriptor( + name='CustomActionProto', + full_name='communicator_objects.CustomActionProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + 
is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=80, + serialized_end=99, ) -DESCRIPTOR.message_types_by_name["CustomAction"] = _CUSTOMACTION +DESCRIPTOR.message_types_by_name['CustomActionProto'] = _CUSTOMACTIONPROTO _sym_db.RegisterFileDescriptor(DESCRIPTOR) -CustomAction = _reflection.GeneratedProtocolMessageType( - "CustomAction", - (_message.Message,), - dict( - DESCRIPTOR=_CUSTOMACTION, - __module__="mlagents.envs.communicator_objects.custom_action_pb2" - # @@protoc_insertion_point(class_scope:communicator_objects.CustomAction) - ), -) -_sym_db.RegisterMessage(CustomAction) +CustomActionProto = _reflection.GeneratedProtocolMessageType('CustomActionProto', (_message.Message,), dict( + DESCRIPTOR = _CUSTOMACTIONPROTO, + __module__ = 'mlagents.envs.communicator_objects.custom_action_pb2' + # @@protoc_insertion_point(class_scope:communicator_objects.CustomActionProto) + )) +_sym_db.RegisterMessage(CustomActionProto) -DESCRIPTOR._options = None +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\252\002\034MLAgents.CommunicatorObjects')) # @@protoc_insertion_point(module_scope) diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/custom_action_pb2.pyi b/ml-agents-envs/mlagents/envs/communicator_objects/custom_action_pb2.pyi index 0c328c343b..2d834a3133 100644 --- a/ml-agents-envs/mlagents/envs/communicator_objects/custom_action_pb2.pyi +++ b/ml-agents-envs/mlagents/envs/communicator_objects/custom_action_pb2.pyi @@ -1,15 +1,23 @@ # @generated by generate_proto_mypy_stubs.py. Do not edit! 
import sys +from google.protobuf.descriptor import ( + Descriptor as google___protobuf___descriptor___Descriptor, +) + from google.protobuf.message import ( Message as google___protobuf___message___Message, ) -class CustomAction(google___protobuf___message___Message): +builtin___bytes = bytes + + +class CustomActionProto(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... def __init__(self, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> CustomAction: ... + def FromString(cls, s: builtin___bytes) -> CustomActionProto: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/custom_observation_pb2.py b/ml-agents-envs/mlagents/envs/communicator_objects/custom_observation_pb2.py index 9c320f8c6c..d0f89db251 100644 --- a/ml-agents-envs/mlagents/envs/communicator_objects/custom_observation_pb2.py +++ b/ml-agents-envs/mlagents/envs/communicator_objects/custom_observation_pb2.py @@ -1,64 +1,64 @@ -# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: mlagents/envs/communicator_objects/custom_observation.proto import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database - +from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() + + DESCRIPTOR = _descriptor.FileDescriptor( - name="mlagents/envs/communicator_objects/custom_observation.proto", - package="communicator_objects", - syntax="proto3", - serialized_options=_b("\252\002\034MLAgents.CommunicatorObjects"), - serialized_pb=_b( - '\n;mlagents/envs/communicator_objects/custom_observation.proto\x12\x14\x63ommunicator_objects"\x13\n\x11\x43ustomObservationB\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3' - ), + name='mlagents/envs/communicator_objects/custom_observation.proto', + package='communicator_objects', + syntax='proto3', + serialized_pb=_b('\n;mlagents/envs/communicator_objects/custom_observation.proto\x12\x14\x63ommunicator_objects\"\x18\n\x16\x43ustomObservationProtoB\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3') ) -_CUSTOMOBSERVATION = _descriptor.Descriptor( - name="CustomObservation", - full_name="communicator_objects.CustomObservation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=85, - serialized_end=104, + + +_CUSTOMOBSERVATIONPROTO = _descriptor.Descriptor( + name='CustomObservationProto', + full_name='communicator_objects.CustomObservationProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + 
extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=85, + serialized_end=109, ) -DESCRIPTOR.message_types_by_name["CustomObservation"] = _CUSTOMOBSERVATION +DESCRIPTOR.message_types_by_name['CustomObservationProto'] = _CUSTOMOBSERVATIONPROTO _sym_db.RegisterFileDescriptor(DESCRIPTOR) -CustomObservation = _reflection.GeneratedProtocolMessageType( - "CustomObservation", - (_message.Message,), - dict( - DESCRIPTOR=_CUSTOMOBSERVATION, - __module__="mlagents.envs.communicator_objects.custom_observation_pb2" - # @@protoc_insertion_point(class_scope:communicator_objects.CustomObservation) - ), -) -_sym_db.RegisterMessage(CustomObservation) +CustomObservationProto = _reflection.GeneratedProtocolMessageType('CustomObservationProto', (_message.Message,), dict( + DESCRIPTOR = _CUSTOMOBSERVATIONPROTO, + __module__ = 'mlagents.envs.communicator_objects.custom_observation_pb2' + # @@protoc_insertion_point(class_scope:communicator_objects.CustomObservationProto) + )) +_sym_db.RegisterMessage(CustomObservationProto) -DESCRIPTOR._options = None +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\252\002\034MLAgents.CommunicatorObjects')) # @@protoc_insertion_point(module_scope) diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/custom_observation_pb2.pyi b/ml-agents-envs/mlagents/envs/communicator_objects/custom_observation_pb2.pyi index ccab14e3f2..3e5f324325 100644 --- a/ml-agents-envs/mlagents/envs/communicator_objects/custom_observation_pb2.pyi +++ b/ml-agents-envs/mlagents/envs/communicator_objects/custom_observation_pb2.pyi @@ -1,15 +1,23 @@ # @generated by generate_proto_mypy_stubs.py. Do not edit! 
import sys +from google.protobuf.descriptor import ( + Descriptor as google___protobuf___descriptor___Descriptor, +) + from google.protobuf.message import ( Message as google___protobuf___message___Message, ) -class CustomObservation(google___protobuf___message___Message): +builtin___bytes = bytes + + +class CustomObservationProto(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... def __init__(self, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> CustomObservation: ... + def FromString(cls, s: builtin___bytes) -> CustomObservationProto: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/custom_reset_parameters_pb2.py b/ml-agents-envs/mlagents/envs/communicator_objects/custom_reset_parameters_pb2.py index fd03c0730f..05fa91db71 100644 --- a/ml-agents-envs/mlagents/envs/communicator_objects/custom_reset_parameters_pb2.py +++ b/ml-agents-envs/mlagents/envs/communicator_objects/custom_reset_parameters_pb2.py @@ -1,64 +1,64 @@ -# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: mlagents/envs/communicator_objects/custom_reset_parameters.proto import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database - +from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() + + DESCRIPTOR = _descriptor.FileDescriptor( - name="mlagents/envs/communicator_objects/custom_reset_parameters.proto", - package="communicator_objects", - syntax="proto3", - serialized_options=_b("\252\002\034MLAgents.CommunicatorObjects"), - serialized_pb=_b( - '\n@mlagents/envs/communicator_objects/custom_reset_parameters.proto\x12\x14\x63ommunicator_objects"\x17\n\x15\x43ustomResetParametersB\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3' - ), + name='mlagents/envs/communicator_objects/custom_reset_parameters.proto', + package='communicator_objects', + syntax='proto3', + serialized_pb=_b('\n@mlagents/envs/communicator_objects/custom_reset_parameters.proto\x12\x14\x63ommunicator_objects\"\x1c\n\x1a\x43ustomResetParametersProtoB\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3') ) -_CUSTOMRESETPARAMETERS = _descriptor.Descriptor( - name="CustomResetParameters", - full_name="communicator_objects.CustomResetParameters", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=90, - serialized_end=113, + + +_CUSTOMRESETPARAMETERSPROTO = _descriptor.Descriptor( + name='CustomResetParametersProto', + full_name='communicator_objects.CustomResetParametersProto', + filename=None, + 
file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=90, + serialized_end=118, ) -DESCRIPTOR.message_types_by_name["CustomResetParameters"] = _CUSTOMRESETPARAMETERS +DESCRIPTOR.message_types_by_name['CustomResetParametersProto'] = _CUSTOMRESETPARAMETERSPROTO _sym_db.RegisterFileDescriptor(DESCRIPTOR) -CustomResetParameters = _reflection.GeneratedProtocolMessageType( - "CustomResetParameters", - (_message.Message,), - dict( - DESCRIPTOR=_CUSTOMRESETPARAMETERS, - __module__="mlagents.envs.communicator_objects.custom_reset_parameters_pb2" - # @@protoc_insertion_point(class_scope:communicator_objects.CustomResetParameters) - ), -) -_sym_db.RegisterMessage(CustomResetParameters) +CustomResetParametersProto = _reflection.GeneratedProtocolMessageType('CustomResetParametersProto', (_message.Message,), dict( + DESCRIPTOR = _CUSTOMRESETPARAMETERSPROTO, + __module__ = 'mlagents.envs.communicator_objects.custom_reset_parameters_pb2' + # @@protoc_insertion_point(class_scope:communicator_objects.CustomResetParametersProto) + )) +_sym_db.RegisterMessage(CustomResetParametersProto) -DESCRIPTOR._options = None +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\252\002\034MLAgents.CommunicatorObjects')) # @@protoc_insertion_point(module_scope) diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/custom_reset_parameters_pb2.pyi b/ml-agents-envs/mlagents/envs/communicator_objects/custom_reset_parameters_pb2.pyi index 26a1da540d..8127e971df 100644 --- a/ml-agents-envs/mlagents/envs/communicator_objects/custom_reset_parameters_pb2.pyi +++ b/ml-agents-envs/mlagents/envs/communicator_objects/custom_reset_parameters_pb2.pyi @@ -1,15 +1,23 @@ # @generated by generate_proto_mypy_stubs.py. Do not edit! 
import sys +from google.protobuf.descriptor import ( + Descriptor as google___protobuf___descriptor___Descriptor, +) + from google.protobuf.message import ( Message as google___protobuf___message___Message, ) -class CustomResetParameters(google___protobuf___message___Message): +builtin___bytes = bytes + + +class CustomResetParametersProto(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... def __init__(self, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> CustomResetParameters: ... + def FromString(cls, s: builtin___bytes) -> CustomResetParametersProto: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/demonstration_meta_pb2.py b/ml-agents-envs/mlagents/envs/communicator_objects/demonstration_meta_pb2.py new file mode 100644 index 0000000000..1214be3465 --- /dev/null +++ b/ml-agents-envs/mlagents/envs/communicator_objects/demonstration_meta_pb2.py @@ -0,0 +1,99 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: mlagents/envs/communicator_objects/demonstration_meta.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='mlagents/envs/communicator_objects/demonstration_meta.proto', + package='communicator_objects', + syntax='proto3', + serialized_pb=_b('\n;mlagents/envs/communicator_objects/demonstration_meta.proto\x12\x14\x63ommunicator_objects\"\x8d\x01\n\x16\x44\x65monstrationMetaProto\x12\x13\n\x0b\x61pi_version\x18\x01 \x01(\x05\x12\x1a\n\x12\x64\x65monstration_name\x18\x02 \x01(\t\x12\x14\n\x0cnumber_steps\x18\x03 \x01(\x05\x12\x17\n\x0fnumber_episodes\x18\x04 \x01(\x05\x12\x13\n\x0bmean_reward\x18\x05 \x01(\x02\x42\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3') +) + + + + +_DEMONSTRATIONMETAPROTO = _descriptor.Descriptor( + name='DemonstrationMetaProto', + full_name='communicator_objects.DemonstrationMetaProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='api_version', full_name='communicator_objects.DemonstrationMetaProto.api_version', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='demonstration_name', full_name='communicator_objects.DemonstrationMetaProto.demonstration_name', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='number_steps', full_name='communicator_objects.DemonstrationMetaProto.number_steps', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='number_episodes', full_name='communicator_objects.DemonstrationMetaProto.number_episodes', index=3, + number=4, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='mean_reward', full_name='communicator_objects.DemonstrationMetaProto.mean_reward', index=4, + number=5, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=86, + serialized_end=227, +) + +DESCRIPTOR.message_types_by_name['DemonstrationMetaProto'] = _DEMONSTRATIONMETAPROTO +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +DemonstrationMetaProto = _reflection.GeneratedProtocolMessageType('DemonstrationMetaProto', (_message.Message,), dict( + DESCRIPTOR = _DEMONSTRATIONMETAPROTO, + __module__ = 'mlagents.envs.communicator_objects.demonstration_meta_pb2' + # @@protoc_insertion_point(class_scope:communicator_objects.DemonstrationMetaProto) + )) +_sym_db.RegisterMessage(DemonstrationMetaProto) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = 
_descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\252\002\034MLAgents.CommunicatorObjects')) +# @@protoc_insertion_point(module_scope) diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/demonstration_meta_proto_pb2.pyi b/ml-agents-envs/mlagents/envs/communicator_objects/demonstration_meta_pb2.pyi similarity index 51% rename from ml-agents-envs/mlagents/envs/communicator_objects/demonstration_meta_proto_pb2.pyi rename to ml-agents-envs/mlagents/envs/communicator_objects/demonstration_meta_pb2.pyi index c905e9927d..ba3c306d51 100644 --- a/ml-agents-envs/mlagents/envs/communicator_objects/demonstration_meta_proto_pb2.pyi +++ b/ml-agents-envs/mlagents/envs/communicator_objects/demonstration_meta_pb2.pyi @@ -1,5 +1,9 @@ # @generated by generate_proto_mypy_stubs.py. Do not edit! import sys +from google.protobuf.descriptor import ( + Descriptor as google___protobuf___descriptor___Descriptor, +) + from google.protobuf.message import ( Message as google___protobuf___message___Message, ) @@ -14,25 +18,33 @@ from typing_extensions import ( ) +builtin___bool = bool +builtin___bytes = bytes +builtin___float = float +builtin___int = int + + class DemonstrationMetaProto(google___protobuf___message___Message): - api_version = ... # type: int + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + api_version = ... # type: builtin___int demonstration_name = ... # type: typing___Text - number_steps = ... # type: int - number_episodes = ... # type: int - mean_reward = ... # type: float + number_steps = ... # type: builtin___int + number_episodes = ... # type: builtin___int + mean_reward = ... 
# type: builtin___float def __init__(self, - api_version : typing___Optional[int] = None, + *, + api_version : typing___Optional[builtin___int] = None, demonstration_name : typing___Optional[typing___Text] = None, - number_steps : typing___Optional[int] = None, - number_episodes : typing___Optional[int] = None, - mean_reward : typing___Optional[float] = None, + number_steps : typing___Optional[builtin___int] = None, + number_episodes : typing___Optional[builtin___int] = None, + mean_reward : typing___Optional[builtin___float] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> DemonstrationMetaProto: ... + def FromString(cls, s: builtin___bytes) -> DemonstrationMetaProto: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): def ClearField(self, field_name: typing_extensions___Literal[u"api_version",u"demonstration_name",u"mean_reward",u"number_episodes",u"number_steps"]) -> None: ... else: - def ClearField(self, field_name: typing_extensions___Literal[b"api_version",b"demonstration_name",b"mean_reward",b"number_episodes",b"number_steps"]) -> None: ... + def ClearField(self, field_name: typing_extensions___Literal[u"api_version",b"api_version",u"demonstration_name",b"demonstration_name",u"mean_reward",b"mean_reward",u"number_episodes",b"number_episodes",u"number_steps",b"number_steps"]) -> None: ... diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/demonstration_meta_proto_pb2.py b/ml-agents-envs/mlagents/envs/communicator_objects/demonstration_meta_proto_pb2.py deleted file mode 100644 index 918ca588d7..0000000000 --- a/ml-agents-envs/mlagents/envs/communicator_objects/demonstration_meta_proto_pb2.py +++ /dev/null @@ -1,155 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: mlagents/envs/communicator_objects/demonstration_meta_proto.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="mlagents/envs/communicator_objects/demonstration_meta_proto.proto", - package="communicator_objects", - syntax="proto3", - serialized_options=_b("\252\002\034MLAgents.CommunicatorObjects"), - serialized_pb=_b( - '\nAmlagents/envs/communicator_objects/demonstration_meta_proto.proto\x12\x14\x63ommunicator_objects"\x8d\x01\n\x16\x44\x65monstrationMetaProto\x12\x13\n\x0b\x61pi_version\x18\x01 \x01(\x05\x12\x1a\n\x12\x64\x65monstration_name\x18\x02 \x01(\t\x12\x14\n\x0cnumber_steps\x18\x03 \x01(\x05\x12\x17\n\x0fnumber_episodes\x18\x04 \x01(\x05\x12\x13\n\x0bmean_reward\x18\x05 \x01(\x02\x42\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3' - ), -) - - -_DEMONSTRATIONMETAPROTO = _descriptor.Descriptor( - name="DemonstrationMetaProto", - full_name="communicator_objects.DemonstrationMetaProto", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="api_version", - full_name="communicator_objects.DemonstrationMetaProto.api_version", - index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="demonstration_name", - full_name="communicator_objects.DemonstrationMetaProto.demonstration_name", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - 
has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="number_steps", - full_name="communicator_objects.DemonstrationMetaProto.number_steps", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="number_episodes", - full_name="communicator_objects.DemonstrationMetaProto.number_episodes", - index=3, - number=4, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="mean_reward", - full_name="communicator_objects.DemonstrationMetaProto.mean_reward", - index=4, - number=5, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=92, - serialized_end=233, -) - -DESCRIPTOR.message_types_by_name["DemonstrationMetaProto"] = _DEMONSTRATIONMETAPROTO -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -DemonstrationMetaProto = _reflection.GeneratedProtocolMessageType( - "DemonstrationMetaProto", - (_message.Message,), - dict( - DESCRIPTOR=_DEMONSTRATIONMETAPROTO, - __module__="mlagents.envs.communicator_objects.demonstration_meta_proto_pb2" - # 
@@protoc_insertion_point(class_scope:communicator_objects.DemonstrationMetaProto) - ), -) -_sym_db.RegisterMessage(DemonstrationMetaProto) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/engine_configuration_pb2.py b/ml-agents-envs/mlagents/envs/communicator_objects/engine_configuration_pb2.py new file mode 100644 index 0000000000..d03e1ad4eb --- /dev/null +++ b/ml-agents-envs/mlagents/envs/communicator_objects/engine_configuration_pb2.py @@ -0,0 +1,106 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: mlagents/envs/communicator_objects/engine_configuration.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='mlagents/envs/communicator_objects/engine_configuration.proto', + package='communicator_objects', + syntax='proto3', + serialized_pb=_b('\n=mlagents/envs/communicator_objects/engine_configuration.proto\x12\x14\x63ommunicator_objects\"\x95\x01\n\x18\x45ngineConfigurationProto\x12\r\n\x05width\x18\x01 \x01(\x05\x12\x0e\n\x06height\x18\x02 \x01(\x05\x12\x15\n\rquality_level\x18\x03 \x01(\x05\x12\x12\n\ntime_scale\x18\x04 \x01(\x02\x12\x19\n\x11target_frame_rate\x18\x05 \x01(\x05\x12\x14\n\x0cshow_monitor\x18\x06 \x01(\x08\x42\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3') +) + + + + +_ENGINECONFIGURATIONPROTO = _descriptor.Descriptor( + name='EngineConfigurationProto', + full_name='communicator_objects.EngineConfigurationProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + 
_descriptor.FieldDescriptor( + name='width', full_name='communicator_objects.EngineConfigurationProto.width', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='height', full_name='communicator_objects.EngineConfigurationProto.height', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='quality_level', full_name='communicator_objects.EngineConfigurationProto.quality_level', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='time_scale', full_name='communicator_objects.EngineConfigurationProto.time_scale', index=3, + number=4, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='target_frame_rate', full_name='communicator_objects.EngineConfigurationProto.target_frame_rate', index=4, + number=5, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='show_monitor', full_name='communicator_objects.EngineConfigurationProto.show_monitor', index=5, + number=6, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=88, + serialized_end=237, +) + +DESCRIPTOR.message_types_by_name['EngineConfigurationProto'] = _ENGINECONFIGURATIONPROTO +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +EngineConfigurationProto = _reflection.GeneratedProtocolMessageType('EngineConfigurationProto', (_message.Message,), dict( + DESCRIPTOR = _ENGINECONFIGURATIONPROTO, + __module__ = 'mlagents.envs.communicator_objects.engine_configuration_pb2' + # @@protoc_insertion_point(class_scope:communicator_objects.EngineConfigurationProto) + )) +_sym_db.RegisterMessage(EngineConfigurationProto) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\252\002\034MLAgents.CommunicatorObjects')) +# @@protoc_insertion_point(module_scope) diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/engine_configuration_pb2.pyi b/ml-agents-envs/mlagents/envs/communicator_objects/engine_configuration_pb2.pyi new file mode 100644 index 0000000000..d4eef07a94 --- /dev/null +++ b/ml-agents-envs/mlagents/envs/communicator_objects/engine_configuration_pb2.pyi @@ -0,0 +1,51 @@ +# @generated by generate_proto_mypy_stubs.py. Do not edit! 
+import sys +from google.protobuf.descriptor import ( + Descriptor as google___protobuf___descriptor___Descriptor, +) + +from google.protobuf.message import ( + Message as google___protobuf___message___Message, +) + +from typing import ( + Optional as typing___Optional, +) + +from typing_extensions import ( + Literal as typing_extensions___Literal, +) + + +builtin___bool = bool +builtin___bytes = bytes +builtin___float = float +builtin___int = int + + +class EngineConfigurationProto(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + width = ... # type: builtin___int + height = ... # type: builtin___int + quality_level = ... # type: builtin___int + time_scale = ... # type: builtin___float + target_frame_rate = ... # type: builtin___int + show_monitor = ... # type: builtin___bool + + def __init__(self, + *, + width : typing___Optional[builtin___int] = None, + height : typing___Optional[builtin___int] = None, + quality_level : typing___Optional[builtin___int] = None, + time_scale : typing___Optional[builtin___float] = None, + target_frame_rate : typing___Optional[builtin___int] = None, + show_monitor : typing___Optional[builtin___bool] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: builtin___bytes) -> EngineConfigurationProto: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"height",u"quality_level",u"show_monitor",u"target_frame_rate",u"time_scale",u"width"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[u"height",b"height",u"quality_level",b"quality_level",u"show_monitor",b"show_monitor",u"target_frame_rate",b"target_frame_rate",u"time_scale",b"time_scale",u"width",b"width"]) -> None: ... 
diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/engine_configuration_proto_pb2.py b/ml-agents-envs/mlagents/envs/communicator_objects/engine_configuration_proto_pb2.py deleted file mode 100644 index 60ce39e87a..0000000000 --- a/ml-agents-envs/mlagents/envs/communicator_objects/engine_configuration_proto_pb2.py +++ /dev/null @@ -1,173 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: mlagents/envs/communicator_objects/engine_configuration_proto.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="mlagents/envs/communicator_objects/engine_configuration_proto.proto", - package="communicator_objects", - syntax="proto3", - serialized_options=_b("\252\002\034MLAgents.CommunicatorObjects"), - serialized_pb=_b( - '\nCmlagents/envs/communicator_objects/engine_configuration_proto.proto\x12\x14\x63ommunicator_objects"\x95\x01\n\x18\x45ngineConfigurationProto\x12\r\n\x05width\x18\x01 \x01(\x05\x12\x0e\n\x06height\x18\x02 \x01(\x05\x12\x15\n\rquality_level\x18\x03 \x01(\x05\x12\x12\n\ntime_scale\x18\x04 \x01(\x02\x12\x19\n\x11target_frame_rate\x18\x05 \x01(\x05\x12\x14\n\x0cshow_monitor\x18\x06 \x01(\x08\x42\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3' - ), -) - - -_ENGINECONFIGURATIONPROTO = _descriptor.Descriptor( - name="EngineConfigurationProto", - full_name="communicator_objects.EngineConfigurationProto", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="width", - full_name="communicator_objects.EngineConfigurationProto.width", - 
index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="height", - full_name="communicator_objects.EngineConfigurationProto.height", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="quality_level", - full_name="communicator_objects.EngineConfigurationProto.quality_level", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="time_scale", - full_name="communicator_objects.EngineConfigurationProto.time_scale", - index=3, - number=4, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="target_frame_rate", - full_name="communicator_objects.EngineConfigurationProto.target_frame_rate", - index=4, - number=5, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="show_monitor", - full_name="communicator_objects.EngineConfigurationProto.show_monitor", - index=5, - number=6, - type=8, - 
cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=94, - serialized_end=243, -) - -DESCRIPTOR.message_types_by_name["EngineConfigurationProto"] = _ENGINECONFIGURATIONPROTO -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -EngineConfigurationProto = _reflection.GeneratedProtocolMessageType( - "EngineConfigurationProto", - (_message.Message,), - dict( - DESCRIPTOR=_ENGINECONFIGURATIONPROTO, - __module__="mlagents.envs.communicator_objects.engine_configuration_proto_pb2" - # @@protoc_insertion_point(class_scope:communicator_objects.EngineConfigurationProto) - ), -) -_sym_db.RegisterMessage(EngineConfigurationProto) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/engine_configuration_proto_pb2.pyi b/ml-agents-envs/mlagents/envs/communicator_objects/engine_configuration_proto_pb2.pyi deleted file mode 100644 index 5d4220fbdc..0000000000 --- a/ml-agents-envs/mlagents/envs/communicator_objects/engine_configuration_proto_pb2.pyi +++ /dev/null @@ -1,39 +0,0 @@ -# @generated by generate_proto_mypy_stubs.py. Do not edit! -import sys -from google.protobuf.message import ( - Message as google___protobuf___message___Message, -) - -from typing import ( - Optional as typing___Optional, -) - -from typing_extensions import ( - Literal as typing_extensions___Literal, -) - - -class EngineConfigurationProto(google___protobuf___message___Message): - width = ... # type: int - height = ... # type: int - quality_level = ... # type: int - time_scale = ... # type: float - target_frame_rate = ... # type: int - show_monitor = ... 
# type: bool - - def __init__(self, - width : typing___Optional[int] = None, - height : typing___Optional[int] = None, - quality_level : typing___Optional[int] = None, - time_scale : typing___Optional[float] = None, - target_frame_rate : typing___Optional[int] = None, - show_monitor : typing___Optional[bool] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> EngineConfigurationProto: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"height",u"quality_level",u"show_monitor",u"target_frame_rate",u"time_scale",u"width"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"height",b"quality_level",b"show_monitor",b"target_frame_rate",b"time_scale",b"width"]) -> None: ... diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/environment_parameters_pb2.py b/ml-agents-envs/mlagents/envs/communicator_objects/environment_parameters_pb2.py new file mode 100644 index 0000000000..c180309b22 --- /dev/null +++ b/ml-agents-envs/mlagents/envs/communicator_objects/environment_parameters_pb2.py @@ -0,0 +1,130 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: mlagents/envs/communicator_objects/environment_parameters.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from mlagents.envs.communicator_objects import custom_reset_parameters_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_custom__reset__parameters__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='mlagents/envs/communicator_objects/environment_parameters.proto', + package='communicator_objects', + syntax='proto3', + serialized_pb=_b('\n?mlagents/envs/communicator_objects/environment_parameters.proto\x12\x14\x63ommunicator_objects\x1a@mlagents/envs/communicator_objects/custom_reset_parameters.proto\"\x88\x02\n\x1a\x45nvironmentParametersProto\x12_\n\x10\x66loat_parameters\x18\x01 \x03(\x0b\x32\x45.communicator_objects.EnvironmentParametersProto.FloatParametersEntry\x12Q\n\x17\x63ustom_reset_parameters\x18\x02 \x01(\x0b\x32\x30.communicator_objects.CustomResetParametersProto\x1a\x36\n\x14\x46loatParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x02:\x02\x38\x01\x42\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3') + , + dependencies=[mlagents_dot_envs_dot_communicator__objects_dot_custom__reset__parameters__pb2.DESCRIPTOR,]) + + + + +_ENVIRONMENTPARAMETERSPROTO_FLOATPARAMETERSENTRY = _descriptor.Descriptor( + name='FloatParametersEntry', + full_name='communicator_objects.EnvironmentParametersProto.FloatParametersEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', 
full_name='communicator_objects.EnvironmentParametersProto.FloatParametersEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='value', full_name='communicator_objects.EnvironmentParametersProto.FloatParametersEntry.value', index=1, + number=2, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=366, + serialized_end=420, +) + +_ENVIRONMENTPARAMETERSPROTO = _descriptor.Descriptor( + name='EnvironmentParametersProto', + full_name='communicator_objects.EnvironmentParametersProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='float_parameters', full_name='communicator_objects.EnvironmentParametersProto.float_parameters', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='custom_reset_parameters', full_name='communicator_objects.EnvironmentParametersProto.custom_reset_parameters', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + 
nested_types=[_ENVIRONMENTPARAMETERSPROTO_FLOATPARAMETERSENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=156, + serialized_end=420, +) + +_ENVIRONMENTPARAMETERSPROTO_FLOATPARAMETERSENTRY.containing_type = _ENVIRONMENTPARAMETERSPROTO +_ENVIRONMENTPARAMETERSPROTO.fields_by_name['float_parameters'].message_type = _ENVIRONMENTPARAMETERSPROTO_FLOATPARAMETERSENTRY +_ENVIRONMENTPARAMETERSPROTO.fields_by_name['custom_reset_parameters'].message_type = mlagents_dot_envs_dot_communicator__objects_dot_custom__reset__parameters__pb2._CUSTOMRESETPARAMETERSPROTO +DESCRIPTOR.message_types_by_name['EnvironmentParametersProto'] = _ENVIRONMENTPARAMETERSPROTO +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +EnvironmentParametersProto = _reflection.GeneratedProtocolMessageType('EnvironmentParametersProto', (_message.Message,), dict( + + FloatParametersEntry = _reflection.GeneratedProtocolMessageType('FloatParametersEntry', (_message.Message,), dict( + DESCRIPTOR = _ENVIRONMENTPARAMETERSPROTO_FLOATPARAMETERSENTRY, + __module__ = 'mlagents.envs.communicator_objects.environment_parameters_pb2' + # @@protoc_insertion_point(class_scope:communicator_objects.EnvironmentParametersProto.FloatParametersEntry) + )) + , + DESCRIPTOR = _ENVIRONMENTPARAMETERSPROTO, + __module__ = 'mlagents.envs.communicator_objects.environment_parameters_pb2' + # @@protoc_insertion_point(class_scope:communicator_objects.EnvironmentParametersProto) + )) +_sym_db.RegisterMessage(EnvironmentParametersProto) +_sym_db.RegisterMessage(EnvironmentParametersProto.FloatParametersEntry) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\252\002\034MLAgents.CommunicatorObjects')) +_ENVIRONMENTPARAMETERSPROTO_FLOATPARAMETERSENTRY.has_options = True +_ENVIRONMENTPARAMETERSPROTO_FLOATPARAMETERSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), 
_b('8\001')) +# @@protoc_insertion_point(module_scope) diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/environment_parameters_proto_pb2.pyi b/ml-agents-envs/mlagents/envs/communicator_objects/environment_parameters_pb2.pyi similarity index 64% rename from ml-agents-envs/mlagents/envs/communicator_objects/environment_parameters_proto_pb2.pyi rename to ml-agents-envs/mlagents/envs/communicator_objects/environment_parameters_pb2.pyi index daba639760..8217e67d47 100644 --- a/ml-agents-envs/mlagents/envs/communicator_objects/environment_parameters_proto_pb2.pyi +++ b/ml-agents-envs/mlagents/envs/communicator_objects/environment_parameters_pb2.pyi @@ -1,11 +1,15 @@ # @generated by generate_proto_mypy_stubs.py. Do not edit! import sys +from google.protobuf.descriptor import ( + Descriptor as google___protobuf___descriptor___Descriptor, +) + from google.protobuf.message import ( Message as google___protobuf___message___Message, ) from mlagents.envs.communicator_objects.custom_reset_parameters_pb2 import ( - CustomResetParameters as mlagents___envs___communicator_objects___custom_reset_parameters_pb2___CustomResetParameters, + CustomResetParametersProto as mlagents___envs___communicator_objects___custom_reset_parameters_pb2___CustomResetParametersProto, ) from typing import ( @@ -20,42 +24,52 @@ from typing_extensions import ( ) +builtin___bool = bool +builtin___bytes = bytes +builtin___float = float +builtin___int = int + + class EnvironmentParametersProto(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... class FloatParametersEntry(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... key = ... # type: typing___Text - value = ... # type: float + value = ... 
# type: builtin___float def __init__(self, + *, key : typing___Optional[typing___Text] = None, - value : typing___Optional[float] = None, + value : typing___Optional[builtin___float] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> EnvironmentParametersProto.FloatParametersEntry: ... + def FromString(cls, s: builtin___bytes) -> EnvironmentParametersProto.FloatParametersEntry: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): def ClearField(self, field_name: typing_extensions___Literal[u"key",u"value"]) -> None: ... else: - def ClearField(self, field_name: typing_extensions___Literal[b"key",b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions___Literal[u"key",b"key",u"value",b"value"]) -> None: ... @property - def float_parameters(self) -> typing___MutableMapping[typing___Text, float]: ... + def float_parameters(self) -> typing___MutableMapping[typing___Text, builtin___float]: ... @property - def custom_reset_parameters(self) -> mlagents___envs___communicator_objects___custom_reset_parameters_pb2___CustomResetParameters: ... + def custom_reset_parameters(self) -> mlagents___envs___communicator_objects___custom_reset_parameters_pb2___CustomResetParametersProto: ... def __init__(self, - float_parameters : typing___Optional[typing___Mapping[typing___Text, float]] = None, - custom_reset_parameters : typing___Optional[mlagents___envs___communicator_objects___custom_reset_parameters_pb2___CustomResetParameters] = None, + *, + float_parameters : typing___Optional[typing___Mapping[typing___Text, builtin___float]] = None, + custom_reset_parameters : typing___Optional[mlagents___envs___communicator_objects___custom_reset_parameters_pb2___CustomResetParametersProto] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> EnvironmentParametersProto: ... 
+ def FromString(cls, s: builtin___bytes) -> EnvironmentParametersProto: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"custom_reset_parameters"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"custom_reset_parameters"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"custom_reset_parameters",u"float_parameters"]) -> None: ... else: - def HasField(self, field_name: typing_extensions___Literal[u"custom_reset_parameters",b"custom_reset_parameters"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[b"custom_reset_parameters",b"float_parameters"]) -> None: ... + def HasField(self, field_name: typing_extensions___Literal[u"custom_reset_parameters",b"custom_reset_parameters"]) -> builtin___bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"custom_reset_parameters",b"custom_reset_parameters",u"float_parameters",b"float_parameters"]) -> None: ... diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/environment_parameters_proto_pb2.py b/ml-agents-envs/mlagents/envs/communicator_objects/environment_parameters_proto_pb2.py deleted file mode 100644 index 0a7819bfb9..0000000000 --- a/ml-agents-envs/mlagents/envs/communicator_objects/environment_parameters_proto_pb2.py +++ /dev/null @@ -1,189 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: mlagents/envs/communicator_objects/environment_parameters_proto.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from mlagents.envs.communicator_objects import ( - custom_reset_parameters_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_custom__reset__parameters__pb2, -) - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="mlagents/envs/communicator_objects/environment_parameters_proto.proto", - package="communicator_objects", - syntax="proto3", - serialized_options=_b("\252\002\034MLAgents.CommunicatorObjects"), - serialized_pb=_b( - '\nEmlagents/envs/communicator_objects/environment_parameters_proto.proto\x12\x14\x63ommunicator_objects\x1a@mlagents/envs/communicator_objects/custom_reset_parameters.proto"\x83\x02\n\x1a\x45nvironmentParametersProto\x12_\n\x10\x66loat_parameters\x18\x01 \x03(\x0b\x32\x45.communicator_objects.EnvironmentParametersProto.FloatParametersEntry\x12L\n\x17\x63ustom_reset_parameters\x18\x02 \x01(\x0b\x32+.communicator_objects.CustomResetParameters\x1a\x36\n\x14\x46loatParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x02:\x02\x38\x01\x42\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3' - ), - dependencies=[ - mlagents_dot_envs_dot_communicator__objects_dot_custom__reset__parameters__pb2.DESCRIPTOR - ], -) - - -_ENVIRONMENTPARAMETERSPROTO_FLOATPARAMETERSENTRY = _descriptor.Descriptor( - name="FloatParametersEntry", - full_name="communicator_objects.EnvironmentParametersProto.FloatParametersEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - 
full_name="communicator_objects.EnvironmentParametersProto.FloatParametersEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="communicator_objects.EnvironmentParametersProto.FloatParametersEntry.value", - index=1, - number=2, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=367, - serialized_end=421, -) - -_ENVIRONMENTPARAMETERSPROTO = _descriptor.Descriptor( - name="EnvironmentParametersProto", - full_name="communicator_objects.EnvironmentParametersProto", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="float_parameters", - full_name="communicator_objects.EnvironmentParametersProto.float_parameters", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="custom_reset_parameters", - full_name="communicator_objects.EnvironmentParametersProto.custom_reset_parameters", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - 
extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_ENVIRONMENTPARAMETERSPROTO_FLOATPARAMETERSENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=162, - serialized_end=421, -) - -_ENVIRONMENTPARAMETERSPROTO_FLOATPARAMETERSENTRY.containing_type = ( - _ENVIRONMENTPARAMETERSPROTO -) -_ENVIRONMENTPARAMETERSPROTO.fields_by_name[ - "float_parameters" -].message_type = _ENVIRONMENTPARAMETERSPROTO_FLOATPARAMETERSENTRY -_ENVIRONMENTPARAMETERSPROTO.fields_by_name[ - "custom_reset_parameters" -].message_type = ( - mlagents_dot_envs_dot_communicator__objects_dot_custom__reset__parameters__pb2._CUSTOMRESETPARAMETERS -) -DESCRIPTOR.message_types_by_name[ - "EnvironmentParametersProto" -] = _ENVIRONMENTPARAMETERSPROTO -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -EnvironmentParametersProto = _reflection.GeneratedProtocolMessageType( - "EnvironmentParametersProto", - (_message.Message,), - dict( - FloatParametersEntry=_reflection.GeneratedProtocolMessageType( - "FloatParametersEntry", - (_message.Message,), - dict( - DESCRIPTOR=_ENVIRONMENTPARAMETERSPROTO_FLOATPARAMETERSENTRY, - __module__="mlagents.envs.communicator_objects.environment_parameters_proto_pb2" - # @@protoc_insertion_point(class_scope:communicator_objects.EnvironmentParametersProto.FloatParametersEntry) - ), - ), - DESCRIPTOR=_ENVIRONMENTPARAMETERSPROTO, - __module__="mlagents.envs.communicator_objects.environment_parameters_proto_pb2" - # @@protoc_insertion_point(class_scope:communicator_objects.EnvironmentParametersProto) - ), -) -_sym_db.RegisterMessage(EnvironmentParametersProto) -_sym_db.RegisterMessage(EnvironmentParametersProto.FloatParametersEntry) - - -DESCRIPTOR._options = None -_ENVIRONMENTPARAMETERSPROTO_FLOATPARAMETERSENTRY._options = None -# @@protoc_insertion_point(module_scope) diff --git 
a/ml-agents-envs/mlagents/envs/communicator_objects/header_pb2.py b/ml-agents-envs/mlagents/envs/communicator_objects/header_pb2.py index e43e4d9be4..a1ba8ae549 100644 --- a/ml-agents-envs/mlagents/envs/communicator_objects/header_pb2.py +++ b/ml-agents-envs/mlagents/envs/communicator_objects/header_pb2.py @@ -1,101 +1,78 @@ -# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: mlagents/envs/communicator_objects/header.proto import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database - +from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() + + DESCRIPTOR = _descriptor.FileDescriptor( - name="mlagents/envs/communicator_objects/header.proto", - package="communicator_objects", - syntax="proto3", - serialized_options=_b("\252\002\034MLAgents.CommunicatorObjects"), - serialized_pb=_b( - '\n/mlagents/envs/communicator_objects/header.proto\x12\x14\x63ommunicator_objects")\n\x06Header\x12\x0e\n\x06status\x18\x01 \x01(\x05\x12\x0f\n\x07message\x18\x02 \x01(\tB\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3' - ), + name='mlagents/envs/communicator_objects/header.proto', + package='communicator_objects', + syntax='proto3', + serialized_pb=_b('\n/mlagents/envs/communicator_objects/header.proto\x12\x14\x63ommunicator_objects\".\n\x0bHeaderProto\x12\x0e\n\x06status\x18\x01 \x01(\x05\x12\x0f\n\x07message\x18\x02 \x01(\tB\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3') ) -_HEADER = _descriptor.Descriptor( - name="Header", - full_name="communicator_objects.Header", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - 
_descriptor.FieldDescriptor( - name="status", - full_name="communicator_objects.Header.status", - index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="message", - full_name="communicator_objects.Header.message", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=73, - serialized_end=114, + + +_HEADERPROTO = _descriptor.Descriptor( + name='HeaderProto', + full_name='communicator_objects.HeaderProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='status', full_name='communicator_objects.HeaderProto.status', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='message', full_name='communicator_objects.HeaderProto.message', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + 
serialized_start=73, + serialized_end=119, ) -DESCRIPTOR.message_types_by_name["Header"] = _HEADER +DESCRIPTOR.message_types_by_name['HeaderProto'] = _HEADERPROTO _sym_db.RegisterFileDescriptor(DESCRIPTOR) -Header = _reflection.GeneratedProtocolMessageType( - "Header", - (_message.Message,), - dict( - DESCRIPTOR=_HEADER, - __module__="mlagents.envs.communicator_objects.header_pb2" - # @@protoc_insertion_point(class_scope:communicator_objects.Header) - ), -) -_sym_db.RegisterMessage(Header) +HeaderProto = _reflection.GeneratedProtocolMessageType('HeaderProto', (_message.Message,), dict( + DESCRIPTOR = _HEADERPROTO, + __module__ = 'mlagents.envs.communicator_objects.header_pb2' + # @@protoc_insertion_point(class_scope:communicator_objects.HeaderProto) + )) +_sym_db.RegisterMessage(HeaderProto) -DESCRIPTOR._options = None +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\252\002\034MLAgents.CommunicatorObjects')) # @@protoc_insertion_point(module_scope) diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/header_pb2.pyi b/ml-agents-envs/mlagents/envs/communicator_objects/header_pb2.pyi index 3852336713..ab7fdd6323 100644 --- a/ml-agents-envs/mlagents/envs/communicator_objects/header_pb2.pyi +++ b/ml-agents-envs/mlagents/envs/communicator_objects/header_pb2.pyi @@ -1,5 +1,9 @@ # @generated by generate_proto_mypy_stubs.py. Do not edit! import sys +from google.protobuf.descriptor import ( + Descriptor as google___protobuf___descriptor___Descriptor, +) + from google.protobuf.message import ( Message as google___protobuf___message___Message, ) @@ -14,19 +18,27 @@ from typing_extensions import ( ) -class Header(google___protobuf___message___Message): - status = ... # type: int +builtin___bool = bool +builtin___bytes = bytes +builtin___float = float +builtin___int = int + + +class HeaderProto(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... 
+ status = ... # type: builtin___int message = ... # type: typing___Text def __init__(self, - status : typing___Optional[int] = None, + *, + status : typing___Optional[builtin___int] = None, message : typing___Optional[typing___Text] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> Header: ... + def FromString(cls, s: builtin___bytes) -> HeaderProto: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): def ClearField(self, field_name: typing_extensions___Literal[u"message",u"status"]) -> None: ... else: - def ClearField(self, field_name: typing_extensions___Literal[b"message",b"status"]) -> None: ... + def ClearField(self, field_name: typing_extensions___Literal[u"message",b"message",u"status",b"status"]) -> None: ... diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/resolution_proto_pb2.py b/ml-agents-envs/mlagents/envs/communicator_objects/resolution_proto_pb2.py deleted file mode 100644 index 4e44597916..0000000000 --- a/ml-agents-envs/mlagents/envs/communicator_objects/resolution_proto_pb2.py +++ /dev/null @@ -1,119 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: mlagents/envs/communicator_objects/resolution_proto.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="mlagents/envs/communicator_objects/resolution_proto.proto", - package="communicator_objects", - syntax="proto3", - serialized_options=_b("\252\002\034MLAgents.CommunicatorObjects"), - serialized_pb=_b( - '\n9mlagents/envs/communicator_objects/resolution_proto.proto\x12\x14\x63ommunicator_objects"D\n\x0fResolutionProto\x12\r\n\x05width\x18\x01 \x01(\x05\x12\x0e\n\x06height\x18\x02 \x01(\x05\x12\x12\n\ngray_scale\x18\x03 \x01(\x08\x42\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3' - ), -) - - -_RESOLUTIONPROTO = _descriptor.Descriptor( - name="ResolutionProto", - full_name="communicator_objects.ResolutionProto", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="width", - full_name="communicator_objects.ResolutionProto.width", - index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="height", - full_name="communicator_objects.ResolutionProto.height", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - 
name="gray_scale", - full_name="communicator_objects.ResolutionProto.gray_scale", - index=2, - number=3, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=83, - serialized_end=151, -) - -DESCRIPTOR.message_types_by_name["ResolutionProto"] = _RESOLUTIONPROTO -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -ResolutionProto = _reflection.GeneratedProtocolMessageType( - "ResolutionProto", - (_message.Message,), - dict( - DESCRIPTOR=_RESOLUTIONPROTO, - __module__="mlagents.envs.communicator_objects.resolution_proto_pb2" - # @@protoc_insertion_point(class_scope:communicator_objects.ResolutionProto) - ), -) -_sym_db.RegisterMessage(ResolutionProto) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/resolution_proto_pb2.pyi b/ml-agents-envs/mlagents/envs/communicator_objects/resolution_proto_pb2.pyi deleted file mode 100644 index a9068d4833..0000000000 --- a/ml-agents-envs/mlagents/envs/communicator_objects/resolution_proto_pb2.pyi +++ /dev/null @@ -1,33 +0,0 @@ -# @generated by generate_proto_mypy_stubs.py. Do not edit! -import sys -from google.protobuf.message import ( - Message as google___protobuf___message___Message, -) - -from typing import ( - Optional as typing___Optional, -) - -from typing_extensions import ( - Literal as typing_extensions___Literal, -) - - -class ResolutionProto(google___protobuf___message___Message): - width = ... # type: int - height = ... # type: int - gray_scale = ... 
# type: bool - - def __init__(self, - width : typing___Optional[int] = None, - height : typing___Optional[int] = None, - gray_scale : typing___Optional[bool] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> ResolutionProto: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"gray_scale",u"height",u"width"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"gray_scale",b"height",b"width"]) -> None: ... diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/space_type_pb2.py b/ml-agents-envs/mlagents/envs/communicator_objects/space_type_pb2.py new file mode 100644 index 0000000000..6b69ceef5f --- /dev/null +++ b/ml-agents-envs/mlagents/envs/communicator_objects/space_type_pb2.py @@ -0,0 +1,59 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: mlagents/envs/communicator_objects/space_type.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='mlagents/envs/communicator_objects/space_type.proto', + package='communicator_objects', + syntax='proto3', + serialized_pb=_b('\n3mlagents/envs/communicator_objects/space_type.proto\x12\x14\x63ommunicator_objects*.\n\x0eSpaceTypeProto\x12\x0c\n\x08\x64iscrete\x10\x00\x12\x0e\n\ncontinuous\x10\x01\x42\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3') +) + +_SPACETYPEPROTO = _descriptor.EnumDescriptor( + name='SpaceTypeProto', + full_name='communicator_objects.SpaceTypeProto', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='discrete', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='continuous', index=1, number=1, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=77, + serialized_end=123, +) +_sym_db.RegisterEnumDescriptor(_SPACETYPEPROTO) + +SpaceTypeProto = enum_type_wrapper.EnumTypeWrapper(_SPACETYPEPROTO) +discrete = 0 +continuous = 1 + + +DESCRIPTOR.enum_types_by_name['SpaceTypeProto'] = _SPACETYPEPROTO +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\252\002\034MLAgents.CommunicatorObjects')) +# @@protoc_insertion_point(module_scope) diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/space_type_pb2.pyi 
b/ml-agents-envs/mlagents/envs/communicator_objects/space_type_pb2.pyi new file mode 100644 index 0000000000..4527ffcfed --- /dev/null +++ b/ml-agents-envs/mlagents/envs/communicator_objects/space_type_pb2.pyi @@ -0,0 +1,37 @@ +# @generated by generate_proto_mypy_stubs.py. Do not edit! +import sys +from google.protobuf.descriptor import ( + EnumDescriptor as google___protobuf___descriptor___EnumDescriptor, +) + +from google.protobuf.message import ( + Message as google___protobuf___message___Message, +) + +from typing import ( + List as typing___List, + Tuple as typing___Tuple, + cast as typing___cast, +) + + +builtin___int = int +builtin___str = str + + +class SpaceTypeProto(builtin___int): + DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ... + @classmethod + def Name(cls, number: builtin___int) -> builtin___str: ... + @classmethod + def Value(cls, name: builtin___str) -> 'SpaceTypeProto': ... + @classmethod + def keys(cls) -> typing___List[builtin___str]: ... + @classmethod + def values(cls) -> typing___List['SpaceTypeProto']: ... + @classmethod + def items(cls) -> typing___List[typing___Tuple[builtin___str, 'SpaceTypeProto']]: ... + discrete = typing___cast('SpaceTypeProto', 0) + continuous = typing___cast('SpaceTypeProto', 1) +discrete = typing___cast('SpaceTypeProto', 0) +continuous = typing___cast('SpaceTypeProto', 1) diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/space_type_proto_pb2.py b/ml-agents-envs/mlagents/envs/communicator_objects/space_type_proto_pb2.py deleted file mode 100644 index 186ef24d5c..0000000000 --- a/ml-agents-envs/mlagents/envs/communicator_objects/space_type_proto_pb2.py +++ /dev/null @@ -1,67 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: mlagents/envs/communicator_objects/space_type_proto.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from mlagents.envs.communicator_objects import ( - resolution_proto_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_resolution__proto__pb2, -) - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="mlagents/envs/communicator_objects/space_type_proto.proto", - package="communicator_objects", - syntax="proto3", - serialized_options=_b("\252\002\034MLAgents.CommunicatorObjects"), - serialized_pb=_b( - "\n9mlagents/envs/communicator_objects/space_type_proto.proto\x12\x14\x63ommunicator_objects\x1a\x39mlagents/envs/communicator_objects/resolution_proto.proto*.\n\x0eSpaceTypeProto\x12\x0c\n\x08\x64iscrete\x10\x00\x12\x0e\n\ncontinuous\x10\x01\x42\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3" - ), - dependencies=[ - mlagents_dot_envs_dot_communicator__objects_dot_resolution__proto__pb2.DESCRIPTOR - ], -) - -_SPACETYPEPROTO = _descriptor.EnumDescriptor( - name="SpaceTypeProto", - full_name="communicator_objects.SpaceTypeProto", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="discrete", index=0, number=0, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="continuous", index=1, number=1, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=142, - serialized_end=188, -) -_sym_db.RegisterEnumDescriptor(_SPACETYPEPROTO) - -SpaceTypeProto = enum_type_wrapper.EnumTypeWrapper(_SPACETYPEPROTO) -discrete = 0 
-continuous = 1 - - -DESCRIPTOR.enum_types_by_name["SpaceTypeProto"] = _SPACETYPEPROTO -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/space_type_proto_pb2.pyi b/ml-agents-envs/mlagents/envs/communicator_objects/space_type_proto_pb2.pyi deleted file mode 100644 index 8dae72559b..0000000000 --- a/ml-agents-envs/mlagents/envs/communicator_objects/space_type_proto_pb2.pyi +++ /dev/null @@ -1,31 +0,0 @@ -# @generated by generate_proto_mypy_stubs.py. Do not edit! -import sys -from google.protobuf.descriptor import ( - EnumDescriptor as google___protobuf___descriptor___EnumDescriptor, -) - -from google.protobuf.message import ( - Message as google___protobuf___message___Message, -) - -from typing import ( - List as typing___List, - Tuple as typing___Tuple, - cast as typing___cast, -) - - -class SpaceTypeProto(int): - DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ... - @classmethod - def Name(cls, number: int) -> str: ... - @classmethod - def Value(cls, name: str) -> SpaceTypeProto: ... - @classmethod - def keys(cls) -> typing___List[str]: ... - @classmethod - def values(cls) -> typing___List[SpaceTypeProto]: ... - @classmethod - def items(cls) -> typing___List[typing___Tuple[str, SpaceTypeProto]]: ... -discrete = typing___cast(SpaceTypeProto, 0) -continuous = typing___cast(SpaceTypeProto, 1) diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/unity_input_pb2.py b/ml-agents-envs/mlagents/envs/communicator_objects/unity_input_pb2.py index dcf7951d80..c8c04d2350 100644 --- a/ml-agents-envs/mlagents/envs/communicator_objects/unity_input_pb2.py +++ b/ml-agents-envs/mlagents/envs/communicator_objects/unity_input_pb2.py @@ -1,123 +1,83 @@ -# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: mlagents/envs/communicator_objects/unity_input.proto import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database - +from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -from mlagents.envs.communicator_objects import ( - unity_rl_input_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_unity__rl__input__pb2, -) -from mlagents.envs.communicator_objects import ( - unity_rl_initialization_input_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_unity__rl__initialization__input__pb2, -) +from mlagents.envs.communicator_objects import unity_rl_input_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_unity__rl__input__pb2 +from mlagents.envs.communicator_objects import unity_rl_initialization_input_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_unity__rl__initialization__input__pb2 DESCRIPTOR = _descriptor.FileDescriptor( - name="mlagents/envs/communicator_objects/unity_input.proto", - package="communicator_objects", - syntax="proto3", - serialized_options=_b("\252\002\034MLAgents.CommunicatorObjects"), - serialized_pb=_b( - '\n4mlagents/envs/communicator_objects/unity_input.proto\x12\x14\x63ommunicator_objects\x1a\x37mlagents/envs/communicator_objects/unity_rl_input.proto\x1a\x46mlagents/envs/communicator_objects/unity_rl_initialization_input.proto"\x95\x01\n\nUnityInput\x12\x34\n\x08rl_input\x18\x01 \x01(\x0b\x32".communicator_objects.UnityRLInput\x12Q\n\x17rl_initialization_input\x18\x02 \x01(\x0b\x32\x30.communicator_objects.UnityRLInitializationInputB\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3' - ), - dependencies=[ - 
mlagents_dot_envs_dot_communicator__objects_dot_unity__rl__input__pb2.DESCRIPTOR, - mlagents_dot_envs_dot_communicator__objects_dot_unity__rl__initialization__input__pb2.DESCRIPTOR, - ], -) + name='mlagents/envs/communicator_objects/unity_input.proto', + package='communicator_objects', + syntax='proto3', + serialized_pb=_b('\n4mlagents/envs/communicator_objects/unity_input.proto\x12\x14\x63ommunicator_objects\x1a\x37mlagents/envs/communicator_objects/unity_rl_input.proto\x1a\x46mlagents/envs/communicator_objects/unity_rl_initialization_input.proto\"\xa4\x01\n\x0fUnityInputProto\x12\x39\n\x08rl_input\x18\x01 \x01(\x0b\x32\'.communicator_objects.UnityRLInputProto\x12V\n\x17rl_initialization_input\x18\x02 \x01(\x0b\x32\x35.communicator_objects.UnityRLInitializationInputProtoB\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3') + , + dependencies=[mlagents_dot_envs_dot_communicator__objects_dot_unity__rl__input__pb2.DESCRIPTOR,mlagents_dot_envs_dot_communicator__objects_dot_unity__rl__initialization__input__pb2.DESCRIPTOR,]) -_UNITYINPUT = _descriptor.Descriptor( - name="UnityInput", - full_name="communicator_objects.UnityInput", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="rl_input", - full_name="communicator_objects.UnityInput.rl_input", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="rl_initialization_input", - full_name="communicator_objects.UnityInput.rl_initialization_input", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), 
- ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=208, - serialized_end=357, -) -_UNITYINPUT.fields_by_name[ - "rl_input" -].message_type = ( - mlagents_dot_envs_dot_communicator__objects_dot_unity__rl__input__pb2._UNITYRLINPUT -) -_UNITYINPUT.fields_by_name[ - "rl_initialization_input" -].message_type = ( - mlagents_dot_envs_dot_communicator__objects_dot_unity__rl__initialization__input__pb2._UNITYRLINITIALIZATIONINPUT + +_UNITYINPUTPROTO = _descriptor.Descriptor( + name='UnityInputProto', + full_name='communicator_objects.UnityInputProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='rl_input', full_name='communicator_objects.UnityInputProto.rl_input', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='rl_initialization_input', full_name='communicator_objects.UnityInputProto.rl_initialization_input', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=208, + serialized_end=372, ) -DESCRIPTOR.message_types_by_name["UnityInput"] = _UNITYINPUT + +_UNITYINPUTPROTO.fields_by_name['rl_input'].message_type = mlagents_dot_envs_dot_communicator__objects_dot_unity__rl__input__pb2._UNITYRLINPUTPROTO +_UNITYINPUTPROTO.fields_by_name['rl_initialization_input'].message_type = 
mlagents_dot_envs_dot_communicator__objects_dot_unity__rl__initialization__input__pb2._UNITYRLINITIALIZATIONINPUTPROTO +DESCRIPTOR.message_types_by_name['UnityInputProto'] = _UNITYINPUTPROTO _sym_db.RegisterFileDescriptor(DESCRIPTOR) -UnityInput = _reflection.GeneratedProtocolMessageType( - "UnityInput", - (_message.Message,), - dict( - DESCRIPTOR=_UNITYINPUT, - __module__="mlagents.envs.communicator_objects.unity_input_pb2" - # @@protoc_insertion_point(class_scope:communicator_objects.UnityInput) - ), -) -_sym_db.RegisterMessage(UnityInput) +UnityInputProto = _reflection.GeneratedProtocolMessageType('UnityInputProto', (_message.Message,), dict( + DESCRIPTOR = _UNITYINPUTPROTO, + __module__ = 'mlagents.envs.communicator_objects.unity_input_pb2' + # @@protoc_insertion_point(class_scope:communicator_objects.UnityInputProto) + )) +_sym_db.RegisterMessage(UnityInputProto) -DESCRIPTOR._options = None +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\252\002\034MLAgents.CommunicatorObjects')) # @@protoc_insertion_point(module_scope) diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/unity_input_pb2.pyi b/ml-agents-envs/mlagents/envs/communicator_objects/unity_input_pb2.pyi index 372ebce4cc..16abb431ba 100644 --- a/ml-agents-envs/mlagents/envs/communicator_objects/unity_input_pb2.pyi +++ b/ml-agents-envs/mlagents/envs/communicator_objects/unity_input_pb2.pyi @@ -1,15 +1,19 @@ # @generated by generate_proto_mypy_stubs.py. Do not edit! 
import sys +from google.protobuf.descriptor import ( + Descriptor as google___protobuf___descriptor___Descriptor, +) + from google.protobuf.message import ( Message as google___protobuf___message___Message, ) from mlagents.envs.communicator_objects.unity_rl_initialization_input_pb2 import ( - UnityRLInitializationInput as mlagents___envs___communicator_objects___unity_rl_initialization_input_pb2___UnityRLInitializationInput, + UnityRLInitializationInputProto as mlagents___envs___communicator_objects___unity_rl_initialization_input_pb2___UnityRLInitializationInputProto, ) from mlagents.envs.communicator_objects.unity_rl_input_pb2 import ( - UnityRLInput as mlagents___envs___communicator_objects___unity_rl_input_pb2___UnityRLInput, + UnityRLInputProto as mlagents___envs___communicator_objects___unity_rl_input_pb2___UnityRLInputProto, ) from typing import ( @@ -21,25 +25,33 @@ from typing_extensions import ( ) -class UnityInput(google___protobuf___message___Message): +builtin___bool = bool +builtin___bytes = bytes +builtin___float = float +builtin___int = int + + +class UnityInputProto(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... @property - def rl_input(self) -> mlagents___envs___communicator_objects___unity_rl_input_pb2___UnityRLInput: ... + def rl_input(self) -> mlagents___envs___communicator_objects___unity_rl_input_pb2___UnityRLInputProto: ... @property - def rl_initialization_input(self) -> mlagents___envs___communicator_objects___unity_rl_initialization_input_pb2___UnityRLInitializationInput: ... + def rl_initialization_input(self) -> mlagents___envs___communicator_objects___unity_rl_initialization_input_pb2___UnityRLInitializationInputProto: ... 
def __init__(self, - rl_input : typing___Optional[mlagents___envs___communicator_objects___unity_rl_input_pb2___UnityRLInput] = None, - rl_initialization_input : typing___Optional[mlagents___envs___communicator_objects___unity_rl_initialization_input_pb2___UnityRLInitializationInput] = None, + *, + rl_input : typing___Optional[mlagents___envs___communicator_objects___unity_rl_input_pb2___UnityRLInputProto] = None, + rl_initialization_input : typing___Optional[mlagents___envs___communicator_objects___unity_rl_initialization_input_pb2___UnityRLInitializationInputProto] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> UnityInput: ... + def FromString(cls, s: builtin___bytes) -> UnityInputProto: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"rl_initialization_input",u"rl_input"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"rl_initialization_input",u"rl_input"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"rl_initialization_input",u"rl_input"]) -> None: ... else: - def HasField(self, field_name: typing_extensions___Literal[u"rl_initialization_input",b"rl_initialization_input",u"rl_input",b"rl_input"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[b"rl_initialization_input",b"rl_input"]) -> None: ... + def HasField(self, field_name: typing_extensions___Literal[u"rl_initialization_input",b"rl_initialization_input",u"rl_input",b"rl_input"]) -> builtin___bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"rl_initialization_input",b"rl_initialization_input",u"rl_input",b"rl_input"]) -> None: ... 
diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/unity_message_pb2.py b/ml-agents-envs/mlagents/envs/communicator_objects/unity_message_pb2.py index 3a88198ffa..1acbe12ae7 100644 --- a/ml-agents-envs/mlagents/envs/communicator_objects/unity_message_pb2.py +++ b/ml-agents-envs/mlagents/envs/communicator_objects/unity_message_pb2.py @@ -1,148 +1,92 @@ -# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: mlagents/envs/communicator_objects/unity_message.proto import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database - +from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -from mlagents.envs.communicator_objects import ( - unity_output_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_unity__output__pb2, -) -from mlagents.envs.communicator_objects import ( - unity_input_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_unity__input__pb2, -) -from mlagents.envs.communicator_objects import ( - header_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_header__pb2, -) +from mlagents.envs.communicator_objects import unity_output_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_unity__output__pb2 +from mlagents.envs.communicator_objects import unity_input_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_unity__input__pb2 +from mlagents.envs.communicator_objects import header_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_header__pb2 DESCRIPTOR = _descriptor.FileDescriptor( - name="mlagents/envs/communicator_objects/unity_message.proto", - package="communicator_objects", - syntax="proto3", - 
serialized_options=_b("\252\002\034MLAgents.CommunicatorObjects"), - serialized_pb=_b( - '\n6mlagents/envs/communicator_objects/unity_message.proto\x12\x14\x63ommunicator_objects\x1a\x35mlagents/envs/communicator_objects/unity_output.proto\x1a\x34mlagents/envs/communicator_objects/unity_input.proto\x1a/mlagents/envs/communicator_objects/header.proto"\xac\x01\n\x0cUnityMessage\x12,\n\x06header\x18\x01 \x01(\x0b\x32\x1c.communicator_objects.Header\x12\x37\n\x0cunity_output\x18\x02 \x01(\x0b\x32!.communicator_objects.UnityOutput\x12\x35\n\x0bunity_input\x18\x03 \x01(\x0b\x32 .communicator_objects.UnityInputB\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3' - ), - dependencies=[ - mlagents_dot_envs_dot_communicator__objects_dot_unity__output__pb2.DESCRIPTOR, - mlagents_dot_envs_dot_communicator__objects_dot_unity__input__pb2.DESCRIPTOR, - mlagents_dot_envs_dot_communicator__objects_dot_header__pb2.DESCRIPTOR, - ], -) + name='mlagents/envs/communicator_objects/unity_message.proto', + package='communicator_objects', + syntax='proto3', + serialized_pb=_b('\n6mlagents/envs/communicator_objects/unity_message.proto\x12\x14\x63ommunicator_objects\x1a\x35mlagents/envs/communicator_objects/unity_output.proto\x1a\x34mlagents/envs/communicator_objects/unity_input.proto\x1a/mlagents/envs/communicator_objects/header.proto\"\xc0\x01\n\x11UnityMessageProto\x12\x31\n\x06header\x18\x01 \x01(\x0b\x32!.communicator_objects.HeaderProto\x12<\n\x0cunity_output\x18\x02 \x01(\x0b\x32&.communicator_objects.UnityOutputProto\x12:\n\x0bunity_input\x18\x03 \x01(\x0b\x32%.communicator_objects.UnityInputProtoB\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3') + , + dependencies=[mlagents_dot_envs_dot_communicator__objects_dot_unity__output__pb2.DESCRIPTOR,mlagents_dot_envs_dot_communicator__objects_dot_unity__input__pb2.DESCRIPTOR,mlagents_dot_envs_dot_communicator__objects_dot_header__pb2.DESCRIPTOR,]) -_UNITYMESSAGE = _descriptor.Descriptor( - name="UnityMessage", - 
full_name="communicator_objects.UnityMessage", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="header", - full_name="communicator_objects.UnityMessage.header", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="unity_output", - full_name="communicator_objects.UnityMessage.unity_output", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="unity_input", - full_name="communicator_objects.UnityMessage.unity_input", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=239, - serialized_end=411, -) -_UNITYMESSAGE.fields_by_name[ - "header" -].message_type = mlagents_dot_envs_dot_communicator__objects_dot_header__pb2._HEADER -_UNITYMESSAGE.fields_by_name[ - "unity_output" -].message_type = ( - mlagents_dot_envs_dot_communicator__objects_dot_unity__output__pb2._UNITYOUTPUT -) -_UNITYMESSAGE.fields_by_name[ - "unity_input" -].message_type = ( - mlagents_dot_envs_dot_communicator__objects_dot_unity__input__pb2._UNITYINPUT + +_UNITYMESSAGEPROTO = _descriptor.Descriptor( + name='UnityMessageProto', + 
full_name='communicator_objects.UnityMessageProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='header', full_name='communicator_objects.UnityMessageProto.header', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='unity_output', full_name='communicator_objects.UnityMessageProto.unity_output', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='unity_input', full_name='communicator_objects.UnityMessageProto.unity_input', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=239, + serialized_end=431, ) -DESCRIPTOR.message_types_by_name["UnityMessage"] = _UNITYMESSAGE + +_UNITYMESSAGEPROTO.fields_by_name['header'].message_type = mlagents_dot_envs_dot_communicator__objects_dot_header__pb2._HEADERPROTO +_UNITYMESSAGEPROTO.fields_by_name['unity_output'].message_type = mlagents_dot_envs_dot_communicator__objects_dot_unity__output__pb2._UNITYOUTPUTPROTO +_UNITYMESSAGEPROTO.fields_by_name['unity_input'].message_type = mlagents_dot_envs_dot_communicator__objects_dot_unity__input__pb2._UNITYINPUTPROTO +DESCRIPTOR.message_types_by_name['UnityMessageProto'] = _UNITYMESSAGEPROTO 
_sym_db.RegisterFileDescriptor(DESCRIPTOR) -UnityMessage = _reflection.GeneratedProtocolMessageType( - "UnityMessage", - (_message.Message,), - dict( - DESCRIPTOR=_UNITYMESSAGE, - __module__="mlagents.envs.communicator_objects.unity_message_pb2" - # @@protoc_insertion_point(class_scope:communicator_objects.UnityMessage) - ), -) -_sym_db.RegisterMessage(UnityMessage) +UnityMessageProto = _reflection.GeneratedProtocolMessageType('UnityMessageProto', (_message.Message,), dict( + DESCRIPTOR = _UNITYMESSAGEPROTO, + __module__ = 'mlagents.envs.communicator_objects.unity_message_pb2' + # @@protoc_insertion_point(class_scope:communicator_objects.UnityMessageProto) + )) +_sym_db.RegisterMessage(UnityMessageProto) -DESCRIPTOR._options = None +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\252\002\034MLAgents.CommunicatorObjects')) # @@protoc_insertion_point(module_scope) diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/unity_message_pb2.pyi b/ml-agents-envs/mlagents/envs/communicator_objects/unity_message_pb2.pyi index 04d221534b..d2752feb34 100644 --- a/ml-agents-envs/mlagents/envs/communicator_objects/unity_message_pb2.pyi +++ b/ml-agents-envs/mlagents/envs/communicator_objects/unity_message_pb2.pyi @@ -1,19 +1,23 @@ # @generated by generate_proto_mypy_stubs.py. Do not edit! 
import sys +from google.protobuf.descriptor import ( + Descriptor as google___protobuf___descriptor___Descriptor, +) + from google.protobuf.message import ( Message as google___protobuf___message___Message, ) from mlagents.envs.communicator_objects.header_pb2 import ( - Header as mlagents___envs___communicator_objects___header_pb2___Header, + HeaderProto as mlagents___envs___communicator_objects___header_pb2___HeaderProto, ) from mlagents.envs.communicator_objects.unity_input_pb2 import ( - UnityInput as mlagents___envs___communicator_objects___unity_input_pb2___UnityInput, + UnityInputProto as mlagents___envs___communicator_objects___unity_input_pb2___UnityInputProto, ) from mlagents.envs.communicator_objects.unity_output_pb2 import ( - UnityOutput as mlagents___envs___communicator_objects___unity_output_pb2___UnityOutput, + UnityOutputProto as mlagents___envs___communicator_objects___unity_output_pb2___UnityOutputProto, ) from typing import ( @@ -25,29 +29,37 @@ from typing_extensions import ( ) -class UnityMessage(google___protobuf___message___Message): +builtin___bool = bool +builtin___bytes = bytes +builtin___float = float +builtin___int = int + + +class UnityMessageProto(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... @property - def header(self) -> mlagents___envs___communicator_objects___header_pb2___Header: ... + def header(self) -> mlagents___envs___communicator_objects___header_pb2___HeaderProto: ... @property - def unity_output(self) -> mlagents___envs___communicator_objects___unity_output_pb2___UnityOutput: ... + def unity_output(self) -> mlagents___envs___communicator_objects___unity_output_pb2___UnityOutputProto: ... @property - def unity_input(self) -> mlagents___envs___communicator_objects___unity_input_pb2___UnityInput: ... + def unity_input(self) -> mlagents___envs___communicator_objects___unity_input_pb2___UnityInputProto: ... 
def __init__(self, - header : typing___Optional[mlagents___envs___communicator_objects___header_pb2___Header] = None, - unity_output : typing___Optional[mlagents___envs___communicator_objects___unity_output_pb2___UnityOutput] = None, - unity_input : typing___Optional[mlagents___envs___communicator_objects___unity_input_pb2___UnityInput] = None, + *, + header : typing___Optional[mlagents___envs___communicator_objects___header_pb2___HeaderProto] = None, + unity_output : typing___Optional[mlagents___envs___communicator_objects___unity_output_pb2___UnityOutputProto] = None, + unity_input : typing___Optional[mlagents___envs___communicator_objects___unity_input_pb2___UnityInputProto] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> UnityMessage: ... + def FromString(cls, s: builtin___bytes) -> UnityMessageProto: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"header",u"unity_input",u"unity_output"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"header",u"unity_input",u"unity_output"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"header",u"unity_input",u"unity_output"]) -> None: ... else: - def HasField(self, field_name: typing_extensions___Literal[u"header",b"header",u"unity_input",b"unity_input",u"unity_output",b"unity_output"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[b"header",b"unity_input",b"unity_output"]) -> None: ... + def HasField(self, field_name: typing_extensions___Literal[u"header",b"header",u"unity_input",b"unity_input",u"unity_output",b"unity_output"]) -> builtin___bool: ... 
+ def ClearField(self, field_name: typing_extensions___Literal[u"header",b"header",u"unity_input",b"unity_input",u"unity_output",b"unity_output"]) -> None: ... diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/unity_output_pb2.py b/ml-agents-envs/mlagents/envs/communicator_objects/unity_output_pb2.py index 759f532ca2..0d729bf665 100644 --- a/ml-agents-envs/mlagents/envs/communicator_objects/unity_output_pb2.py +++ b/ml-agents-envs/mlagents/envs/communicator_objects/unity_output_pb2.py @@ -1,123 +1,83 @@ -# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: mlagents/envs/communicator_objects/unity_output.proto import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database - +from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -from mlagents.envs.communicator_objects import ( - unity_rl_output_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_unity__rl__output__pb2, -) -from mlagents.envs.communicator_objects import ( - unity_rl_initialization_output_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_unity__rl__initialization__output__pb2, -) +from mlagents.envs.communicator_objects import unity_rl_output_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_unity__rl__output__pb2 +from mlagents.envs.communicator_objects import unity_rl_initialization_output_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_unity__rl__initialization__output__pb2 DESCRIPTOR = _descriptor.FileDescriptor( - name="mlagents/envs/communicator_objects/unity_output.proto", - package="communicator_objects", - syntax="proto3", - 
serialized_options=_b("\252\002\034MLAgents.CommunicatorObjects"), - serialized_pb=_b( - '\n5mlagents/envs/communicator_objects/unity_output.proto\x12\x14\x63ommunicator_objects\x1a\x38mlagents/envs/communicator_objects/unity_rl_output.proto\x1aGmlagents/envs/communicator_objects/unity_rl_initialization_output.proto"\x9a\x01\n\x0bUnityOutput\x12\x36\n\trl_output\x18\x01 \x01(\x0b\x32#.communicator_objects.UnityRLOutput\x12S\n\x18rl_initialization_output\x18\x02 \x01(\x0b\x32\x31.communicator_objects.UnityRLInitializationOutputB\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3' - ), - dependencies=[ - mlagents_dot_envs_dot_communicator__objects_dot_unity__rl__output__pb2.DESCRIPTOR, - mlagents_dot_envs_dot_communicator__objects_dot_unity__rl__initialization__output__pb2.DESCRIPTOR, - ], -) + name='mlagents/envs/communicator_objects/unity_output.proto', + package='communicator_objects', + syntax='proto3', + serialized_pb=_b('\n5mlagents/envs/communicator_objects/unity_output.proto\x12\x14\x63ommunicator_objects\x1a\x38mlagents/envs/communicator_objects/unity_rl_output.proto\x1aGmlagents/envs/communicator_objects/unity_rl_initialization_output.proto\"\xa9\x01\n\x10UnityOutputProto\x12;\n\trl_output\x18\x01 \x01(\x0b\x32(.communicator_objects.UnityRLOutputProto\x12X\n\x18rl_initialization_output\x18\x02 \x01(\x0b\x32\x36.communicator_objects.UnityRLInitializationOutputProtoB\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3') + , + dependencies=[mlagents_dot_envs_dot_communicator__objects_dot_unity__rl__output__pb2.DESCRIPTOR,mlagents_dot_envs_dot_communicator__objects_dot_unity__rl__initialization__output__pb2.DESCRIPTOR,]) -_UNITYOUTPUT = _descriptor.Descriptor( - name="UnityOutput", - full_name="communicator_objects.UnityOutput", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="rl_output", - full_name="communicator_objects.UnityOutput.rl_output", - index=0, - number=1, - type=11, - cpp_type=10, 
- label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="rl_initialization_output", - full_name="communicator_objects.UnityOutput.rl_initialization_output", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=211, - serialized_end=365, -) -_UNITYOUTPUT.fields_by_name[ - "rl_output" -].message_type = ( - mlagents_dot_envs_dot_communicator__objects_dot_unity__rl__output__pb2._UNITYRLOUTPUT -) -_UNITYOUTPUT.fields_by_name[ - "rl_initialization_output" -].message_type = ( - mlagents_dot_envs_dot_communicator__objects_dot_unity__rl__initialization__output__pb2._UNITYRLINITIALIZATIONOUTPUT + +_UNITYOUTPUTPROTO = _descriptor.Descriptor( + name='UnityOutputProto', + full_name='communicator_objects.UnityOutputProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='rl_output', full_name='communicator_objects.UnityOutputProto.rl_output', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='rl_initialization_output', full_name='communicator_objects.UnityOutputProto.rl_initialization_output', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, 
enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=211, + serialized_end=380, ) -DESCRIPTOR.message_types_by_name["UnityOutput"] = _UNITYOUTPUT + +_UNITYOUTPUTPROTO.fields_by_name['rl_output'].message_type = mlagents_dot_envs_dot_communicator__objects_dot_unity__rl__output__pb2._UNITYRLOUTPUTPROTO +_UNITYOUTPUTPROTO.fields_by_name['rl_initialization_output'].message_type = mlagents_dot_envs_dot_communicator__objects_dot_unity__rl__initialization__output__pb2._UNITYRLINITIALIZATIONOUTPUTPROTO +DESCRIPTOR.message_types_by_name['UnityOutputProto'] = _UNITYOUTPUTPROTO _sym_db.RegisterFileDescriptor(DESCRIPTOR) -UnityOutput = _reflection.GeneratedProtocolMessageType( - "UnityOutput", - (_message.Message,), - dict( - DESCRIPTOR=_UNITYOUTPUT, - __module__="mlagents.envs.communicator_objects.unity_output_pb2" - # @@protoc_insertion_point(class_scope:communicator_objects.UnityOutput) - ), -) -_sym_db.RegisterMessage(UnityOutput) +UnityOutputProto = _reflection.GeneratedProtocolMessageType('UnityOutputProto', (_message.Message,), dict( + DESCRIPTOR = _UNITYOUTPUTPROTO, + __module__ = 'mlagents.envs.communicator_objects.unity_output_pb2' + # @@protoc_insertion_point(class_scope:communicator_objects.UnityOutputProto) + )) +_sym_db.RegisterMessage(UnityOutputProto) -DESCRIPTOR._options = None +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\252\002\034MLAgents.CommunicatorObjects')) # @@protoc_insertion_point(module_scope) diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/unity_output_pb2.pyi b/ml-agents-envs/mlagents/envs/communicator_objects/unity_output_pb2.pyi index b7a66ff436..65d55d3f69 100644 --- 
a/ml-agents-envs/mlagents/envs/communicator_objects/unity_output_pb2.pyi +++ b/ml-agents-envs/mlagents/envs/communicator_objects/unity_output_pb2.pyi @@ -1,15 +1,19 @@ # @generated by generate_proto_mypy_stubs.py. Do not edit! import sys +from google.protobuf.descriptor import ( + Descriptor as google___protobuf___descriptor___Descriptor, +) + from google.protobuf.message import ( Message as google___protobuf___message___Message, ) from mlagents.envs.communicator_objects.unity_rl_initialization_output_pb2 import ( - UnityRLInitializationOutput as mlagents___envs___communicator_objects___unity_rl_initialization_output_pb2___UnityRLInitializationOutput, + UnityRLInitializationOutputProto as mlagents___envs___communicator_objects___unity_rl_initialization_output_pb2___UnityRLInitializationOutputProto, ) from mlagents.envs.communicator_objects.unity_rl_output_pb2 import ( - UnityRLOutput as mlagents___envs___communicator_objects___unity_rl_output_pb2___UnityRLOutput, + UnityRLOutputProto as mlagents___envs___communicator_objects___unity_rl_output_pb2___UnityRLOutputProto, ) from typing import ( @@ -21,25 +25,33 @@ from typing_extensions import ( ) -class UnityOutput(google___protobuf___message___Message): +builtin___bool = bool +builtin___bytes = bytes +builtin___float = float +builtin___int = int + + +class UnityOutputProto(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... @property - def rl_output(self) -> mlagents___envs___communicator_objects___unity_rl_output_pb2___UnityRLOutput: ... + def rl_output(self) -> mlagents___envs___communicator_objects___unity_rl_output_pb2___UnityRLOutputProto: ... @property - def rl_initialization_output(self) -> mlagents___envs___communicator_objects___unity_rl_initialization_output_pb2___UnityRLInitializationOutput: ... + def rl_initialization_output(self) -> mlagents___envs___communicator_objects___unity_rl_initialization_output_pb2___UnityRLInitializationOutputProto: ... 
def __init__(self, - rl_output : typing___Optional[mlagents___envs___communicator_objects___unity_rl_output_pb2___UnityRLOutput] = None, - rl_initialization_output : typing___Optional[mlagents___envs___communicator_objects___unity_rl_initialization_output_pb2___UnityRLInitializationOutput] = None, + *, + rl_output : typing___Optional[mlagents___envs___communicator_objects___unity_rl_output_pb2___UnityRLOutputProto] = None, + rl_initialization_output : typing___Optional[mlagents___envs___communicator_objects___unity_rl_initialization_output_pb2___UnityRLInitializationOutputProto] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> UnityOutput: ... + def FromString(cls, s: builtin___bytes) -> UnityOutputProto: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"rl_initialization_output",u"rl_output"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"rl_initialization_output",u"rl_output"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"rl_initialization_output",u"rl_output"]) -> None: ... else: - def HasField(self, field_name: typing_extensions___Literal[u"rl_initialization_output",b"rl_initialization_output",u"rl_output",b"rl_output"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[b"rl_initialization_output",b"rl_output"]) -> None: ... + def HasField(self, field_name: typing_extensions___Literal[u"rl_initialization_output",b"rl_initialization_output",u"rl_output",b"rl_output"]) -> builtin___bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"rl_initialization_output",b"rl_initialization_output",u"rl_output",b"rl_output"]) -> None: ... 
diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/unity_rl_initialization_input_pb2.py b/ml-agents-envs/mlagents/envs/communicator_objects/unity_rl_initialization_input_pb2.py index 01dffeac13..54886fd948 100644 --- a/ml-agents-envs/mlagents/envs/communicator_objects/unity_rl_initialization_input_pb2.py +++ b/ml-agents-envs/mlagents/envs/communicator_objects/unity_rl_initialization_input_pb2.py @@ -1,85 +1,71 @@ -# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: mlagents/envs/communicator_objects/unity_rl_initialization_input.proto import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database - +from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() + + DESCRIPTOR = _descriptor.FileDescriptor( - name="mlagents/envs/communicator_objects/unity_rl_initialization_input.proto", - package="communicator_objects", - syntax="proto3", - serialized_options=_b("\252\002\034MLAgents.CommunicatorObjects"), - serialized_pb=_b( - '\nFmlagents/envs/communicator_objects/unity_rl_initialization_input.proto\x12\x14\x63ommunicator_objects"*\n\x1aUnityRLInitializationInput\x12\x0c\n\x04seed\x18\x01 \x01(\x05\x42\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3' - ), + name='mlagents/envs/communicator_objects/unity_rl_initialization_input.proto', + package='communicator_objects', + syntax='proto3', + serialized_pb=_b('\nFmlagents/envs/communicator_objects/unity_rl_initialization_input.proto\x12\x14\x63ommunicator_objects\"/\n\x1fUnityRLInitializationInputProto\x12\x0c\n\x04seed\x18\x01 
\x01(\x05\x42\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3') ) -_UNITYRLINITIALIZATIONINPUT = _descriptor.Descriptor( - name="UnityRLInitializationInput", - full_name="communicator_objects.UnityRLInitializationInput", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="seed", - full_name="communicator_objects.UnityRLInitializationInput.seed", - index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=96, - serialized_end=138, + + +_UNITYRLINITIALIZATIONINPUTPROTO = _descriptor.Descriptor( + name='UnityRLInitializationInputProto', + full_name='communicator_objects.UnityRLInitializationInputProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='seed', full_name='communicator_objects.UnityRLInitializationInputProto.seed', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=96, + serialized_end=143, ) -DESCRIPTOR.message_types_by_name[ - "UnityRLInitializationInput" -] = _UNITYRLINITIALIZATIONINPUT +DESCRIPTOR.message_types_by_name['UnityRLInitializationInputProto'] = _UNITYRLINITIALIZATIONINPUTPROTO _sym_db.RegisterFileDescriptor(DESCRIPTOR) -UnityRLInitializationInput = 
_reflection.GeneratedProtocolMessageType( - "UnityRLInitializationInput", - (_message.Message,), - dict( - DESCRIPTOR=_UNITYRLINITIALIZATIONINPUT, - __module__="mlagents.envs.communicator_objects.unity_rl_initialization_input_pb2" - # @@protoc_insertion_point(class_scope:communicator_objects.UnityRLInitializationInput) - ), -) -_sym_db.RegisterMessage(UnityRLInitializationInput) +UnityRLInitializationInputProto = _reflection.GeneratedProtocolMessageType('UnityRLInitializationInputProto', (_message.Message,), dict( + DESCRIPTOR = _UNITYRLINITIALIZATIONINPUTPROTO, + __module__ = 'mlagents.envs.communicator_objects.unity_rl_initialization_input_pb2' + # @@protoc_insertion_point(class_scope:communicator_objects.UnityRLInitializationInputProto) + )) +_sym_db.RegisterMessage(UnityRLInitializationInputProto) -DESCRIPTOR._options = None +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\252\002\034MLAgents.CommunicatorObjects')) # @@protoc_insertion_point(module_scope) diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/unity_rl_initialization_input_pb2.pyi b/ml-agents-envs/mlagents/envs/communicator_objects/unity_rl_initialization_input_pb2.pyi index d85a1f54eb..f7e6abf171 100644 --- a/ml-agents-envs/mlagents/envs/communicator_objects/unity_rl_initialization_input_pb2.pyi +++ b/ml-agents-envs/mlagents/envs/communicator_objects/unity_rl_initialization_input_pb2.pyi @@ -1,5 +1,9 @@ # @generated by generate_proto_mypy_stubs.py. Do not edit! import sys +from google.protobuf.descriptor import ( + Descriptor as google___protobuf___descriptor___Descriptor, +) + from google.protobuf.message import ( Message as google___protobuf___message___Message, ) @@ -13,17 +17,25 @@ from typing_extensions import ( ) -class UnityRLInitializationInput(google___protobuf___message___Message): - seed = ... 
# type: int +builtin___bool = bool +builtin___bytes = bytes +builtin___float = float +builtin___int = int + + +class UnityRLInitializationInputProto(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + seed = ... # type: builtin___int def __init__(self, - seed : typing___Optional[int] = None, + *, + seed : typing___Optional[builtin___int] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> UnityRLInitializationInput: ... + def FromString(cls, s: builtin___bytes) -> UnityRLInitializationInputProto: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): def ClearField(self, field_name: typing_extensions___Literal[u"seed"]) -> None: ... else: - def ClearField(self, field_name: typing_extensions___Literal[b"seed"]) -> None: ... + def ClearField(self, field_name: typing_extensions___Literal[u"seed",b"seed"]) -> None: ... diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/unity_rl_initialization_output_pb2.py b/ml-agents-envs/mlagents/envs/communicator_objects/unity_rl_initialization_output_pb2.py index 65ac6f55d3..4c95a1e7d1 100644 --- a/ml-agents-envs/mlagents/envs/communicator_objects/unity_rl_initialization_output_pb2.py +++ b/ml-agents-envs/mlagents/envs/communicator_objects/unity_rl_initialization_output_pb2.py @@ -1,179 +1,104 @@ -# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: mlagents/envs/communicator_objects/unity_rl_initialization_output.proto import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database - +from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -from mlagents.envs.communicator_objects import ( - brain_parameters_proto_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_brain__parameters__proto__pb2, -) -from mlagents.envs.communicator_objects import ( - environment_parameters_proto_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_environment__parameters__proto__pb2, -) +from mlagents.envs.communicator_objects import brain_parameters_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_brain__parameters__pb2 +from mlagents.envs.communicator_objects import environment_parameters_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_environment__parameters__pb2 DESCRIPTOR = _descriptor.FileDescriptor( - name="mlagents/envs/communicator_objects/unity_rl_initialization_output.proto", - package="communicator_objects", - syntax="proto3", - serialized_options=_b("\252\002\034MLAgents.CommunicatorObjects"), - serialized_pb=_b( - '\nGmlagents/envs/communicator_objects/unity_rl_initialization_output.proto\x12\x14\x63ommunicator_objects\x1a?mlagents/envs/communicator_objects/brain_parameters_proto.proto\x1a\x45mlagents/envs/communicator_objects/environment_parameters_proto.proto"\xe6\x01\n\x1bUnityRLInitializationOutput\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x10\n\x08log_path\x18\x03 \x01(\t\x12\x44\n\x10\x62rain_parameters\x18\x05 
\x03(\x0b\x32*.communicator_objects.BrainParametersProto\x12P\n\x16\x65nvironment_parameters\x18\x06 \x01(\x0b\x32\x30.communicator_objects.EnvironmentParametersProtoB\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3' - ), - dependencies=[ - mlagents_dot_envs_dot_communicator__objects_dot_brain__parameters__proto__pb2.DESCRIPTOR, - mlagents_dot_envs_dot_communicator__objects_dot_environment__parameters__proto__pb2.DESCRIPTOR, - ], -) + name='mlagents/envs/communicator_objects/unity_rl_initialization_output.proto', + package='communicator_objects', + syntax='proto3', + serialized_pb=_b('\nGmlagents/envs/communicator_objects/unity_rl_initialization_output.proto\x12\x14\x63ommunicator_objects\x1a\x39mlagents/envs/communicator_objects/brain_parameters.proto\x1a?mlagents/envs/communicator_objects/environment_parameters.proto\"\xeb\x01\n UnityRLInitializationOutputProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x10\n\x08log_path\x18\x03 \x01(\t\x12\x44\n\x10\x62rain_parameters\x18\x05 \x03(\x0b\x32*.communicator_objects.BrainParametersProto\x12P\n\x16\x65nvironment_parameters\x18\x06 \x01(\x0b\x32\x30.communicator_objects.EnvironmentParametersProtoB\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3') + , + dependencies=[mlagents_dot_envs_dot_communicator__objects_dot_brain__parameters__pb2.DESCRIPTOR,mlagents_dot_envs_dot_communicator__objects_dot_environment__parameters__pb2.DESCRIPTOR,]) -_UNITYRLINITIALIZATIONOUTPUT = _descriptor.Descriptor( - name="UnityRLInitializationOutput", - full_name="communicator_objects.UnityRLInitializationOutput", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="communicator_objects.UnityRLInitializationOutput.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - 
is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="version", - full_name="communicator_objects.UnityRLInitializationOutput.version", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="log_path", - full_name="communicator_objects.UnityRLInitializationOutput.log_path", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="brain_parameters", - full_name="communicator_objects.UnityRLInitializationOutput.brain_parameters", - index=3, - number=5, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="environment_parameters", - full_name="communicator_objects.UnityRLInitializationOutput.environment_parameters", - index=4, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=234, - serialized_end=464, -) -_UNITYRLINITIALIZATIONOUTPUT.fields_by_name[ - 
"brain_parameters" -].message_type = ( - mlagents_dot_envs_dot_communicator__objects_dot_brain__parameters__proto__pb2._BRAINPARAMETERSPROTO -) -_UNITYRLINITIALIZATIONOUTPUT.fields_by_name[ - "environment_parameters" -].message_type = ( - mlagents_dot_envs_dot_communicator__objects_dot_environment__parameters__proto__pb2._ENVIRONMENTPARAMETERSPROTO + +_UNITYRLINITIALIZATIONOUTPUTPROTO = _descriptor.Descriptor( + name='UnityRLInitializationOutputProto', + full_name='communicator_objects.UnityRLInitializationOutputProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='communicator_objects.UnityRLInitializationOutputProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='version', full_name='communicator_objects.UnityRLInitializationOutputProto.version', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='log_path', full_name='communicator_objects.UnityRLInitializationOutputProto.log_path', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='brain_parameters', full_name='communicator_objects.UnityRLInitializationOutputProto.brain_parameters', index=3, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='environment_parameters', full_name='communicator_objects.UnityRLInitializationOutputProto.environment_parameters', index=4, + number=6, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=222, + serialized_end=457, ) -DESCRIPTOR.message_types_by_name[ - "UnityRLInitializationOutput" -] = _UNITYRLINITIALIZATIONOUTPUT + +_UNITYRLINITIALIZATIONOUTPUTPROTO.fields_by_name['brain_parameters'].message_type = mlagents_dot_envs_dot_communicator__objects_dot_brain__parameters__pb2._BRAINPARAMETERSPROTO +_UNITYRLINITIALIZATIONOUTPUTPROTO.fields_by_name['environment_parameters'].message_type = mlagents_dot_envs_dot_communicator__objects_dot_environment__parameters__pb2._ENVIRONMENTPARAMETERSPROTO +DESCRIPTOR.message_types_by_name['UnityRLInitializationOutputProto'] = _UNITYRLINITIALIZATIONOUTPUTPROTO _sym_db.RegisterFileDescriptor(DESCRIPTOR) -UnityRLInitializationOutput = _reflection.GeneratedProtocolMessageType( - "UnityRLInitializationOutput", - (_message.Message,), - dict( - DESCRIPTOR=_UNITYRLINITIALIZATIONOUTPUT, - __module__="mlagents.envs.communicator_objects.unity_rl_initialization_output_pb2" - # @@protoc_insertion_point(class_scope:communicator_objects.UnityRLInitializationOutput) - ), -) -_sym_db.RegisterMessage(UnityRLInitializationOutput) +UnityRLInitializationOutputProto = _reflection.GeneratedProtocolMessageType('UnityRLInitializationOutputProto', (_message.Message,), dict( + DESCRIPTOR = _UNITYRLINITIALIZATIONOUTPUTPROTO, + __module__ = 
'mlagents.envs.communicator_objects.unity_rl_initialization_output_pb2' + # @@protoc_insertion_point(class_scope:communicator_objects.UnityRLInitializationOutputProto) + )) +_sym_db.RegisterMessage(UnityRLInitializationOutputProto) -DESCRIPTOR._options = None +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\252\002\034MLAgents.CommunicatorObjects')) # @@protoc_insertion_point(module_scope) diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/unity_rl_initialization_output_pb2.pyi b/ml-agents-envs/mlagents/envs/communicator_objects/unity_rl_initialization_output_pb2.pyi index 6cccfeb9bb..365d6c2a30 100644 --- a/ml-agents-envs/mlagents/envs/communicator_objects/unity_rl_initialization_output_pb2.pyi +++ b/ml-agents-envs/mlagents/envs/communicator_objects/unity_rl_initialization_output_pb2.pyi @@ -1,5 +1,9 @@ # @generated by generate_proto_mypy_stubs.py. Do not edit! import sys +from google.protobuf.descriptor import ( + Descriptor as google___protobuf___descriptor___Descriptor, +) + from google.protobuf.internal.containers import ( RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer, ) @@ -8,12 +12,12 @@ from google.protobuf.message import ( Message as google___protobuf___message___Message, ) -from mlagents.envs.communicator_objects.brain_parameters_proto_pb2 import ( - BrainParametersProto as mlagents___envs___communicator_objects___brain_parameters_proto_pb2___BrainParametersProto, +from mlagents.envs.communicator_objects.brain_parameters_pb2 import ( + BrainParametersProto as mlagents___envs___communicator_objects___brain_parameters_pb2___BrainParametersProto, ) -from mlagents.envs.communicator_objects.environment_parameters_proto_pb2 import ( - EnvironmentParametersProto as mlagents___envs___communicator_objects___environment_parameters_proto_pb2___EnvironmentParametersProto, +from 
mlagents.envs.communicator_objects.environment_parameters_pb2 import ( + EnvironmentParametersProto as mlagents___envs___communicator_objects___environment_parameters_pb2___EnvironmentParametersProto, ) from typing import ( @@ -27,31 +31,39 @@ from typing_extensions import ( ) -class UnityRLInitializationOutput(google___protobuf___message___Message): +builtin___bool = bool +builtin___bytes = bytes +builtin___float = float +builtin___int = int + + +class UnityRLInitializationOutputProto(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... name = ... # type: typing___Text version = ... # type: typing___Text log_path = ... # type: typing___Text @property - def brain_parameters(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[mlagents___envs___communicator_objects___brain_parameters_proto_pb2___BrainParametersProto]: ... + def brain_parameters(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[mlagents___envs___communicator_objects___brain_parameters_pb2___BrainParametersProto]: ... @property - def environment_parameters(self) -> mlagents___envs___communicator_objects___environment_parameters_proto_pb2___EnvironmentParametersProto: ... + def environment_parameters(self) -> mlagents___envs___communicator_objects___environment_parameters_pb2___EnvironmentParametersProto: ... 
def __init__(self, + *, name : typing___Optional[typing___Text] = None, version : typing___Optional[typing___Text] = None, log_path : typing___Optional[typing___Text] = None, - brain_parameters : typing___Optional[typing___Iterable[mlagents___envs___communicator_objects___brain_parameters_proto_pb2___BrainParametersProto]] = None, - environment_parameters : typing___Optional[mlagents___envs___communicator_objects___environment_parameters_proto_pb2___EnvironmentParametersProto] = None, + brain_parameters : typing___Optional[typing___Iterable[mlagents___envs___communicator_objects___brain_parameters_pb2___BrainParametersProto]] = None, + environment_parameters : typing___Optional[mlagents___envs___communicator_objects___environment_parameters_pb2___EnvironmentParametersProto] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> UnityRLInitializationOutput: ... + def FromString(cls, s: builtin___bytes) -> UnityRLInitializationOutputProto: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"environment_parameters"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"environment_parameters"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"brain_parameters",u"environment_parameters",u"log_path",u"name",u"version"]) -> None: ... else: - def HasField(self, field_name: typing_extensions___Literal[u"environment_parameters",b"environment_parameters"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[b"brain_parameters",b"environment_parameters",b"log_path",b"name",b"version"]) -> None: ... + def HasField(self, field_name: typing_extensions___Literal[u"environment_parameters",b"environment_parameters"]) -> builtin___bool: ... 
+ def ClearField(self, field_name: typing_extensions___Literal[u"brain_parameters",b"brain_parameters",u"environment_parameters",b"environment_parameters",u"log_path",b"log_path",u"name",b"name",u"version",b"version"]) -> None: ... diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/unity_rl_input_pb2.py b/ml-agents-envs/mlagents/envs/communicator_objects/unity_rl_input_pb2.py index 16145bfd89..bd726152a1 100644 --- a/ml-agents-envs/mlagents/envs/communicator_objects/unity_rl_input_pb2.py +++ b/ml-agents-envs/mlagents/envs/communicator_objects/unity_rl_input_pb2.py @@ -1,291 +1,188 @@ -# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: mlagents/envs/communicator_objects/unity_rl_input.proto import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database - +from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -from mlagents.envs.communicator_objects import ( - agent_action_proto_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_agent__action__proto__pb2, -) -from mlagents.envs.communicator_objects import ( - environment_parameters_proto_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_environment__parameters__proto__pb2, -) -from mlagents.envs.communicator_objects import ( - command_proto_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_command__proto__pb2, -) +from mlagents.envs.communicator_objects import agent_action_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_agent__action__pb2 +from mlagents.envs.communicator_objects import environment_parameters_pb2 as 
mlagents_dot_envs_dot_communicator__objects_dot_environment__parameters__pb2 +from mlagents.envs.communicator_objects import command_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_command__pb2 DESCRIPTOR = _descriptor.FileDescriptor( - name="mlagents/envs/communicator_objects/unity_rl_input.proto", - package="communicator_objects", - syntax="proto3", - serialized_options=_b("\252\002\034MLAgents.CommunicatorObjects"), - serialized_pb=_b( - '\n7mlagents/envs/communicator_objects/unity_rl_input.proto\x12\x14\x63ommunicator_objects\x1a;mlagents/envs/communicator_objects/agent_action_proto.proto\x1a\x45mlagents/envs/communicator_objects/environment_parameters_proto.proto\x1a\x36mlagents/envs/communicator_objects/command_proto.proto"\xb4\x03\n\x0cUnityRLInput\x12K\n\ragent_actions\x18\x01 \x03(\x0b\x32\x34.communicator_objects.UnityRLInput.AgentActionsEntry\x12P\n\x16\x65nvironment_parameters\x18\x02 \x01(\x0b\x32\x30.communicator_objects.EnvironmentParametersProto\x12\x13\n\x0bis_training\x18\x03 \x01(\x08\x12\x33\n\x07\x63ommand\x18\x04 \x01(\x0e\x32".communicator_objects.CommandProto\x1aM\n\x14ListAgentActionProto\x12\x35\n\x05value\x18\x01 \x03(\x0b\x32&.communicator_objects.AgentActionProto\x1al\n\x11\x41gentActionsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x46\n\x05value\x18\x02 \x01(\x0b\x32\x37.communicator_objects.UnityRLInput.ListAgentActionProto:\x02\x38\x01\x42\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3' - ), - dependencies=[ - mlagents_dot_envs_dot_communicator__objects_dot_agent__action__proto__pb2.DESCRIPTOR, - mlagents_dot_envs_dot_communicator__objects_dot_environment__parameters__proto__pb2.DESCRIPTOR, - mlagents_dot_envs_dot_communicator__objects_dot_command__proto__pb2.DESCRIPTOR, - ], -) - - -_UNITYRLINPUT_LISTAGENTACTIONPROTO = _descriptor.Descriptor( - name="ListAgentActionProto", - full_name="communicator_objects.UnityRLInput.ListAgentActionProto", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - 
_descriptor.FieldDescriptor( - name="value", - full_name="communicator_objects.UnityRLInput.ListAgentActionProto.value", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=519, - serialized_end=596, + name='mlagents/envs/communicator_objects/unity_rl_input.proto', + package='communicator_objects', + syntax='proto3', + serialized_pb=_b('\n7mlagents/envs/communicator_objects/unity_rl_input.proto\x12\x14\x63ommunicator_objects\x1a\x35mlagents/envs/communicator_objects/agent_action.proto\x1a?mlagents/envs/communicator_objects/environment_parameters.proto\x1a\x30mlagents/envs/communicator_objects/command.proto\"\xc3\x03\n\x11UnityRLInputProto\x12P\n\ragent_actions\x18\x01 \x03(\x0b\x32\x39.communicator_objects.UnityRLInputProto.AgentActionsEntry\x12P\n\x16\x65nvironment_parameters\x18\x02 \x01(\x0b\x32\x30.communicator_objects.EnvironmentParametersProto\x12\x13\n\x0bis_training\x18\x03 \x01(\x08\x12\x33\n\x07\x63ommand\x18\x04 \x01(\x0e\x32\".communicator_objects.CommandProto\x1aM\n\x14ListAgentActionProto\x12\x35\n\x05value\x18\x01 \x03(\x0b\x32&.communicator_objects.AgentActionProto\x1aq\n\x11\x41gentActionsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12K\n\x05value\x18\x02 \x01(\x0b\x32<.communicator_objects.UnityRLInputProto.ListAgentActionProto:\x02\x38\x01\x42\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3') + , + dependencies=[mlagents_dot_envs_dot_communicator__objects_dot_agent__action__pb2.DESCRIPTOR,mlagents_dot_envs_dot_communicator__objects_dot_environment__parameters__pb2.DESCRIPTOR,mlagents_dot_envs_dot_communicator__objects_dot_command__pb2.DESCRIPTOR,]) + + + + 
+_UNITYRLINPUTPROTO_LISTAGENTACTIONPROTO = _descriptor.Descriptor( + name='ListAgentActionProto', + full_name='communicator_objects.UnityRLInputProto.ListAgentActionProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', full_name='communicator_objects.UnityRLInputProto.ListAgentActionProto.value', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=511, + serialized_end=588, ) -_UNITYRLINPUT_AGENTACTIONSENTRY = _descriptor.Descriptor( - name="AgentActionsEntry", - full_name="communicator_objects.UnityRLInput.AgentActionsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="communicator_objects.UnityRLInput.AgentActionsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="communicator_objects.UnityRLInput.AgentActionsEntry.value", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - 
serialized_start=598, - serialized_end=706, +_UNITYRLINPUTPROTO_AGENTACTIONSENTRY = _descriptor.Descriptor( + name='AgentActionsEntry', + full_name='communicator_objects.UnityRLInputProto.AgentActionsEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='communicator_objects.UnityRLInputProto.AgentActionsEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='value', full_name='communicator_objects.UnityRLInputProto.AgentActionsEntry.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=590, + serialized_end=703, ) -_UNITYRLINPUT = _descriptor.Descriptor( - name="UnityRLInput", - full_name="communicator_objects.UnityRLInput", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="agent_actions", - full_name="communicator_objects.UnityRLInput.agent_actions", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="environment_parameters", - full_name="communicator_objects.UnityRLInput.environment_parameters", - 
index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="is_training", - full_name="communicator_objects.UnityRLInput.is_training", - index=2, - number=3, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="command", - full_name="communicator_objects.UnityRLInput.command", - index=3, - number=4, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_UNITYRLINPUT_LISTAGENTACTIONPROTO, _UNITYRLINPUT_AGENTACTIONSENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=270, - serialized_end=706, +_UNITYRLINPUTPROTO = _descriptor.Descriptor( + name='UnityRLInputProto', + full_name='communicator_objects.UnityRLInputProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='agent_actions', full_name='communicator_objects.UnityRLInputProto.agent_actions', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='environment_parameters', full_name='communicator_objects.UnityRLInputProto.environment_parameters', index=1, + 
number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='is_training', full_name='communicator_objects.UnityRLInputProto.is_training', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='command', full_name='communicator_objects.UnityRLInputProto.command', index=3, + number=4, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[_UNITYRLINPUTPROTO_LISTAGENTACTIONPROTO, _UNITYRLINPUTPROTO_AGENTACTIONSENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=252, + serialized_end=703, ) -_UNITYRLINPUT_LISTAGENTACTIONPROTO.fields_by_name[ - "value" -].message_type = ( - mlagents_dot_envs_dot_communicator__objects_dot_agent__action__proto__pb2._AGENTACTIONPROTO -) -_UNITYRLINPUT_LISTAGENTACTIONPROTO.containing_type = _UNITYRLINPUT -_UNITYRLINPUT_AGENTACTIONSENTRY.fields_by_name[ - "value" -].message_type = _UNITYRLINPUT_LISTAGENTACTIONPROTO -_UNITYRLINPUT_AGENTACTIONSENTRY.containing_type = _UNITYRLINPUT -_UNITYRLINPUT.fields_by_name[ - "agent_actions" -].message_type = _UNITYRLINPUT_AGENTACTIONSENTRY -_UNITYRLINPUT.fields_by_name[ - "environment_parameters" -].message_type = ( - mlagents_dot_envs_dot_communicator__objects_dot_environment__parameters__proto__pb2._ENVIRONMENTPARAMETERSPROTO -) -_UNITYRLINPUT.fields_by_name[ - "command" -].enum_type = ( - 
mlagents_dot_envs_dot_communicator__objects_dot_command__proto__pb2._COMMANDPROTO -) -DESCRIPTOR.message_types_by_name["UnityRLInput"] = _UNITYRLINPUT +_UNITYRLINPUTPROTO_LISTAGENTACTIONPROTO.fields_by_name['value'].message_type = mlagents_dot_envs_dot_communicator__objects_dot_agent__action__pb2._AGENTACTIONPROTO +_UNITYRLINPUTPROTO_LISTAGENTACTIONPROTO.containing_type = _UNITYRLINPUTPROTO +_UNITYRLINPUTPROTO_AGENTACTIONSENTRY.fields_by_name['value'].message_type = _UNITYRLINPUTPROTO_LISTAGENTACTIONPROTO +_UNITYRLINPUTPROTO_AGENTACTIONSENTRY.containing_type = _UNITYRLINPUTPROTO +_UNITYRLINPUTPROTO.fields_by_name['agent_actions'].message_type = _UNITYRLINPUTPROTO_AGENTACTIONSENTRY +_UNITYRLINPUTPROTO.fields_by_name['environment_parameters'].message_type = mlagents_dot_envs_dot_communicator__objects_dot_environment__parameters__pb2._ENVIRONMENTPARAMETERSPROTO +_UNITYRLINPUTPROTO.fields_by_name['command'].enum_type = mlagents_dot_envs_dot_communicator__objects_dot_command__pb2._COMMANDPROTO +DESCRIPTOR.message_types_by_name['UnityRLInputProto'] = _UNITYRLINPUTPROTO _sym_db.RegisterFileDescriptor(DESCRIPTOR) -UnityRLInput = _reflection.GeneratedProtocolMessageType( - "UnityRLInput", - (_message.Message,), - dict( - ListAgentActionProto=_reflection.GeneratedProtocolMessageType( - "ListAgentActionProto", - (_message.Message,), - dict( - DESCRIPTOR=_UNITYRLINPUT_LISTAGENTACTIONPROTO, - __module__="mlagents.envs.communicator_objects.unity_rl_input_pb2" - # @@protoc_insertion_point(class_scope:communicator_objects.UnityRLInput.ListAgentActionProto) - ), - ), - AgentActionsEntry=_reflection.GeneratedProtocolMessageType( - "AgentActionsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_UNITYRLINPUT_AGENTACTIONSENTRY, - __module__="mlagents.envs.communicator_objects.unity_rl_input_pb2" - # @@protoc_insertion_point(class_scope:communicator_objects.UnityRLInput.AgentActionsEntry) - ), - ), - DESCRIPTOR=_UNITYRLINPUT, - 
__module__="mlagents.envs.communicator_objects.unity_rl_input_pb2" - # @@protoc_insertion_point(class_scope:communicator_objects.UnityRLInput) - ), -) -_sym_db.RegisterMessage(UnityRLInput) -_sym_db.RegisterMessage(UnityRLInput.ListAgentActionProto) -_sym_db.RegisterMessage(UnityRLInput.AgentActionsEntry) - - -DESCRIPTOR._options = None -_UNITYRLINPUT_AGENTACTIONSENTRY._options = None +UnityRLInputProto = _reflection.GeneratedProtocolMessageType('UnityRLInputProto', (_message.Message,), dict( + + ListAgentActionProto = _reflection.GeneratedProtocolMessageType('ListAgentActionProto', (_message.Message,), dict( + DESCRIPTOR = _UNITYRLINPUTPROTO_LISTAGENTACTIONPROTO, + __module__ = 'mlagents.envs.communicator_objects.unity_rl_input_pb2' + # @@protoc_insertion_point(class_scope:communicator_objects.UnityRLInputProto.ListAgentActionProto) + )) + , + + AgentActionsEntry = _reflection.GeneratedProtocolMessageType('AgentActionsEntry', (_message.Message,), dict( + DESCRIPTOR = _UNITYRLINPUTPROTO_AGENTACTIONSENTRY, + __module__ = 'mlagents.envs.communicator_objects.unity_rl_input_pb2' + # @@protoc_insertion_point(class_scope:communicator_objects.UnityRLInputProto.AgentActionsEntry) + )) + , + DESCRIPTOR = _UNITYRLINPUTPROTO, + __module__ = 'mlagents.envs.communicator_objects.unity_rl_input_pb2' + # @@protoc_insertion_point(class_scope:communicator_objects.UnityRLInputProto) + )) +_sym_db.RegisterMessage(UnityRLInputProto) +_sym_db.RegisterMessage(UnityRLInputProto.ListAgentActionProto) +_sym_db.RegisterMessage(UnityRLInputProto.AgentActionsEntry) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\252\002\034MLAgents.CommunicatorObjects')) +_UNITYRLINPUTPROTO_AGENTACTIONSENTRY.has_options = True +_UNITYRLINPUTPROTO_AGENTACTIONSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) # @@protoc_insertion_point(module_scope) diff --git 
a/ml-agents-envs/mlagents/envs/communicator_objects/unity_rl_input_pb2.pyi b/ml-agents-envs/mlagents/envs/communicator_objects/unity_rl_input_pb2.pyi index 1724000b4a..9cefffdced 100644 --- a/ml-agents-envs/mlagents/envs/communicator_objects/unity_rl_input_pb2.pyi +++ b/ml-agents-envs/mlagents/envs/communicator_objects/unity_rl_input_pb2.pyi @@ -1,5 +1,9 @@ # @generated by generate_proto_mypy_stubs.py. Do not edit! import sys +from google.protobuf.descriptor import ( + Descriptor as google___protobuf___descriptor___Descriptor, +) + from google.protobuf.internal.containers import ( RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer, ) @@ -8,16 +12,16 @@ from google.protobuf.message import ( Message as google___protobuf___message___Message, ) -from mlagents.envs.communicator_objects.agent_action_proto_pb2 import ( - AgentActionProto as mlagents___envs___communicator_objects___agent_action_proto_pb2___AgentActionProto, +from mlagents.envs.communicator_objects.agent_action_pb2 import ( + AgentActionProto as mlagents___envs___communicator_objects___agent_action_pb2___AgentActionProto, ) -from mlagents.envs.communicator_objects.command_proto_pb2 import ( - CommandProto as mlagents___envs___communicator_objects___command_proto_pb2___CommandProto, +from mlagents.envs.communicator_objects.command_pb2 import ( + CommandProto as mlagents___envs___communicator_objects___command_pb2___CommandProto, ) -from mlagents.envs.communicator_objects.environment_parameters_proto_pb2 import ( - EnvironmentParametersProto as mlagents___envs___communicator_objects___environment_parameters_proto_pb2___EnvironmentParametersProto, +from mlagents.envs.communicator_objects.environment_parameters_pb2 import ( + EnvironmentParametersProto as mlagents___envs___communicator_objects___environment_parameters_pb2___EnvironmentParametersProto, ) from typing import ( @@ -33,67 +37,79 @@ from typing_extensions import ( ) -class 
UnityRLInput(google___protobuf___message___Message): +builtin___bool = bool +builtin___bytes = bytes +builtin___float = float +builtin___int = int + + +class UnityRLInputProto(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... class ListAgentActionProto(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... @property - def value(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[mlagents___envs___communicator_objects___agent_action_proto_pb2___AgentActionProto]: ... + def value(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[mlagents___envs___communicator_objects___agent_action_pb2___AgentActionProto]: ... def __init__(self, - value : typing___Optional[typing___Iterable[mlagents___envs___communicator_objects___agent_action_proto_pb2___AgentActionProto]] = None, + *, + value : typing___Optional[typing___Iterable[mlagents___envs___communicator_objects___agent_action_pb2___AgentActionProto]] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> UnityRLInput.ListAgentActionProto: ... + def FromString(cls, s: builtin___bytes) -> UnityRLInputProto.ListAgentActionProto: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): def ClearField(self, field_name: typing_extensions___Literal[u"value"]) -> None: ... else: - def ClearField(self, field_name: typing_extensions___Literal[b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions___Literal[u"value",b"value"]) -> None: ... class AgentActionsEntry(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... key = ... # type: typing___Text @property - def value(self) -> UnityRLInput.ListAgentActionProto: ... 
+ def value(self) -> UnityRLInputProto.ListAgentActionProto: ... def __init__(self, + *, key : typing___Optional[typing___Text] = None, - value : typing___Optional[UnityRLInput.ListAgentActionProto] = None, + value : typing___Optional[UnityRLInputProto.ListAgentActionProto] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> UnityRLInput.AgentActionsEntry: ... + def FromString(cls, s: builtin___bytes) -> UnityRLInputProto.AgentActionsEntry: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"value"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"value"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"key",u"value"]) -> None: ... else: - def HasField(self, field_name: typing_extensions___Literal[u"value",b"value"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[b"key",b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions___Literal[u"value",b"value"]) -> builtin___bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"key",b"key",u"value",b"value"]) -> None: ... - is_training = ... # type: bool - command = ... # type: mlagents___envs___communicator_objects___command_proto_pb2___CommandProto + is_training = ... # type: builtin___bool + command = ... # type: mlagents___envs___communicator_objects___command_pb2___CommandProto @property - def agent_actions(self) -> typing___MutableMapping[typing___Text, UnityRLInput.ListAgentActionProto]: ... + def agent_actions(self) -> typing___MutableMapping[typing___Text, UnityRLInputProto.ListAgentActionProto]: ... @property - def environment_parameters(self) -> mlagents___envs___communicator_objects___environment_parameters_proto_pb2___EnvironmentParametersProto: ... 
+ def environment_parameters(self) -> mlagents___envs___communicator_objects___environment_parameters_pb2___EnvironmentParametersProto: ... def __init__(self, - agent_actions : typing___Optional[typing___Mapping[typing___Text, UnityRLInput.ListAgentActionProto]] = None, - environment_parameters : typing___Optional[mlagents___envs___communicator_objects___environment_parameters_proto_pb2___EnvironmentParametersProto] = None, - is_training : typing___Optional[bool] = None, - command : typing___Optional[mlagents___envs___communicator_objects___command_proto_pb2___CommandProto] = None, + *, + agent_actions : typing___Optional[typing___Mapping[typing___Text, UnityRLInputProto.ListAgentActionProto]] = None, + environment_parameters : typing___Optional[mlagents___envs___communicator_objects___environment_parameters_pb2___EnvironmentParametersProto] = None, + is_training : typing___Optional[builtin___bool] = None, + command : typing___Optional[mlagents___envs___communicator_objects___command_pb2___CommandProto] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> UnityRLInput: ... + def FromString(cls, s: builtin___bytes) -> UnityRLInputProto: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"environment_parameters"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"environment_parameters"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"agent_actions",u"command",u"environment_parameters",u"is_training"]) -> None: ... else: - def HasField(self, field_name: typing_extensions___Literal[u"environment_parameters",b"environment_parameters"]) -> bool: ... 
- def ClearField(self, field_name: typing_extensions___Literal[b"agent_actions",b"command",b"environment_parameters",b"is_training"]) -> None: ... + def HasField(self, field_name: typing_extensions___Literal[u"environment_parameters",b"environment_parameters"]) -> builtin___bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"agent_actions",b"agent_actions",u"command",b"command",u"environment_parameters",b"environment_parameters",u"is_training",b"is_training"]) -> None: ... diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/unity_rl_output_pb2.py b/ml-agents-envs/mlagents/envs/communicator_objects/unity_rl_output_pb2.py index 01e10a33f9..6bf33beb6b 100644 --- a/ml-agents-envs/mlagents/envs/communicator_objects/unity_rl_output_pb2.py +++ b/ml-agents-envs/mlagents/envs/communicator_objects/unity_rl_output_pb2.py @@ -1,237 +1,163 @@ -# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: mlagents/envs/communicator_objects/unity_rl_output.proto import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database - +from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -from mlagents.envs.communicator_objects import ( - agent_info_proto_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_agent__info__proto__pb2, -) +from mlagents.envs.communicator_objects import agent_info_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_agent__info__pb2 DESCRIPTOR = _descriptor.FileDescriptor( - name="mlagents/envs/communicator_objects/unity_rl_output.proto", - package="communicator_objects", - syntax="proto3", - 
serialized_options=_b("\252\002\034MLAgents.CommunicatorObjects"), - serialized_pb=_b( - '\n8mlagents/envs/communicator_objects/unity_rl_output.proto\x12\x14\x63ommunicator_objects\x1a\x39mlagents/envs/communicator_objects/agent_info_proto.proto"\xa3\x02\n\rUnityRLOutput\x12\x13\n\x0bglobal_done\x18\x01 \x01(\x08\x12G\n\nagentInfos\x18\x02 \x03(\x0b\x32\x33.communicator_objects.UnityRLOutput.AgentInfosEntry\x1aI\n\x12ListAgentInfoProto\x12\x33\n\x05value\x18\x01 \x03(\x0b\x32$.communicator_objects.AgentInfoProto\x1ai\n\x0f\x41gentInfosEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x45\n\x05value\x18\x02 \x01(\x0b\x32\x36.communicator_objects.UnityRLOutput.ListAgentInfoProto:\x02\x38\x01\x42\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3' - ), - dependencies=[ - mlagents_dot_envs_dot_communicator__objects_dot_agent__info__proto__pb2.DESCRIPTOR - ], -) - - -_UNITYRLOUTPUT_LISTAGENTINFOPROTO = _descriptor.Descriptor( - name="ListAgentInfoProto", - full_name="communicator_objects.UnityRLOutput.ListAgentInfoProto", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="value", - full_name="communicator_objects.UnityRLOutput.ListAgentInfoProto.value", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=253, - serialized_end=326, + name='mlagents/envs/communicator_objects/unity_rl_output.proto', + package='communicator_objects', + syntax='proto3', + 
serialized_pb=_b('\n8mlagents/envs/communicator_objects/unity_rl_output.proto\x12\x14\x63ommunicator_objects\x1a\x33mlagents/envs/communicator_objects/agent_info.proto\"\xa3\x02\n\x12UnityRLOutputProto\x12L\n\nagentInfos\x18\x02 \x03(\x0b\x32\x38.communicator_objects.UnityRLOutputProto.AgentInfosEntry\x1aI\n\x12ListAgentInfoProto\x12\x33\n\x05value\x18\x01 \x03(\x0b\x32$.communicator_objects.AgentInfoProto\x1an\n\x0f\x41gentInfosEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12J\n\x05value\x18\x02 \x01(\x0b\x32;.communicator_objects.UnityRLOutputProto.ListAgentInfoProto:\x02\x38\x01J\x04\x08\x01\x10\x02\x42\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3') + , + dependencies=[mlagents_dot_envs_dot_communicator__objects_dot_agent__info__pb2.DESCRIPTOR,]) + + + + +_UNITYRLOUTPUTPROTO_LISTAGENTINFOPROTO = _descriptor.Descriptor( + name='ListAgentInfoProto', + full_name='communicator_objects.UnityRLOutputProto.ListAgentInfoProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', full_name='communicator_objects.UnityRLOutputProto.ListAgentInfoProto.value', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=236, + serialized_end=309, ) -_UNITYRLOUTPUT_AGENTINFOSENTRY = _descriptor.Descriptor( - name="AgentInfosEntry", - full_name="communicator_objects.UnityRLOutput.AgentInfosEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="communicator_objects.UnityRLOutput.AgentInfosEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - 
has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="communicator_objects.UnityRLOutput.AgentInfosEntry.value", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=328, - serialized_end=433, +_UNITYRLOUTPUTPROTO_AGENTINFOSENTRY = _descriptor.Descriptor( + name='AgentInfosEntry', + full_name='communicator_objects.UnityRLOutputProto.AgentInfosEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='communicator_objects.UnityRLOutputProto.AgentInfosEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='value', full_name='communicator_objects.UnityRLOutputProto.AgentInfosEntry.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], 
+ oneofs=[ + ], + serialized_start=311, + serialized_end=421, ) -_UNITYRLOUTPUT = _descriptor.Descriptor( - name="UnityRLOutput", - full_name="communicator_objects.UnityRLOutput", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="global_done", - full_name="communicator_objects.UnityRLOutput.global_done", - index=0, - number=1, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="agentInfos", - full_name="communicator_objects.UnityRLOutput.agentInfos", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_UNITYRLOUTPUT_LISTAGENTINFOPROTO, _UNITYRLOUTPUT_AGENTINFOSENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=142, - serialized_end=433, +_UNITYRLOUTPUTPROTO = _descriptor.Descriptor( + name='UnityRLOutputProto', + full_name='communicator_objects.UnityRLOutputProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='agentInfos', full_name='communicator_objects.UnityRLOutputProto.agentInfos', index=0, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[_UNITYRLOUTPUTPROTO_LISTAGENTINFOPROTO, _UNITYRLOUTPUTPROTO_AGENTINFOSENTRY, ], + enum_types=[ + ], + options=None, + 
is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=136, + serialized_end=427, ) -_UNITYRLOUTPUT_LISTAGENTINFOPROTO.fields_by_name[ - "value" -].message_type = ( - mlagents_dot_envs_dot_communicator__objects_dot_agent__info__proto__pb2._AGENTINFOPROTO -) -_UNITYRLOUTPUT_LISTAGENTINFOPROTO.containing_type = _UNITYRLOUTPUT -_UNITYRLOUTPUT_AGENTINFOSENTRY.fields_by_name[ - "value" -].message_type = _UNITYRLOUTPUT_LISTAGENTINFOPROTO -_UNITYRLOUTPUT_AGENTINFOSENTRY.containing_type = _UNITYRLOUTPUT -_UNITYRLOUTPUT.fields_by_name[ - "agentInfos" -].message_type = _UNITYRLOUTPUT_AGENTINFOSENTRY -DESCRIPTOR.message_types_by_name["UnityRLOutput"] = _UNITYRLOUTPUT +_UNITYRLOUTPUTPROTO_LISTAGENTINFOPROTO.fields_by_name['value'].message_type = mlagents_dot_envs_dot_communicator__objects_dot_agent__info__pb2._AGENTINFOPROTO +_UNITYRLOUTPUTPROTO_LISTAGENTINFOPROTO.containing_type = _UNITYRLOUTPUTPROTO +_UNITYRLOUTPUTPROTO_AGENTINFOSENTRY.fields_by_name['value'].message_type = _UNITYRLOUTPUTPROTO_LISTAGENTINFOPROTO +_UNITYRLOUTPUTPROTO_AGENTINFOSENTRY.containing_type = _UNITYRLOUTPUTPROTO +_UNITYRLOUTPUTPROTO.fields_by_name['agentInfos'].message_type = _UNITYRLOUTPUTPROTO_AGENTINFOSENTRY +DESCRIPTOR.message_types_by_name['UnityRLOutputProto'] = _UNITYRLOUTPUTPROTO _sym_db.RegisterFileDescriptor(DESCRIPTOR) -UnityRLOutput = _reflection.GeneratedProtocolMessageType( - "UnityRLOutput", - (_message.Message,), - dict( - ListAgentInfoProto=_reflection.GeneratedProtocolMessageType( - "ListAgentInfoProto", - (_message.Message,), - dict( - DESCRIPTOR=_UNITYRLOUTPUT_LISTAGENTINFOPROTO, - __module__="mlagents.envs.communicator_objects.unity_rl_output_pb2" - # @@protoc_insertion_point(class_scope:communicator_objects.UnityRLOutput.ListAgentInfoProto) - ), - ), - AgentInfosEntry=_reflection.GeneratedProtocolMessageType( - "AgentInfosEntry", - (_message.Message,), - dict( - DESCRIPTOR=_UNITYRLOUTPUT_AGENTINFOSENTRY, - 
__module__="mlagents.envs.communicator_objects.unity_rl_output_pb2" - # @@protoc_insertion_point(class_scope:communicator_objects.UnityRLOutput.AgentInfosEntry) - ), - ), - DESCRIPTOR=_UNITYRLOUTPUT, - __module__="mlagents.envs.communicator_objects.unity_rl_output_pb2" - # @@protoc_insertion_point(class_scope:communicator_objects.UnityRLOutput) - ), -) -_sym_db.RegisterMessage(UnityRLOutput) -_sym_db.RegisterMessage(UnityRLOutput.ListAgentInfoProto) -_sym_db.RegisterMessage(UnityRLOutput.AgentInfosEntry) - - -DESCRIPTOR._options = None -_UNITYRLOUTPUT_AGENTINFOSENTRY._options = None +UnityRLOutputProto = _reflection.GeneratedProtocolMessageType('UnityRLOutputProto', (_message.Message,), dict( + + ListAgentInfoProto = _reflection.GeneratedProtocolMessageType('ListAgentInfoProto', (_message.Message,), dict( + DESCRIPTOR = _UNITYRLOUTPUTPROTO_LISTAGENTINFOPROTO, + __module__ = 'mlagents.envs.communicator_objects.unity_rl_output_pb2' + # @@protoc_insertion_point(class_scope:communicator_objects.UnityRLOutputProto.ListAgentInfoProto) + )) + , + + AgentInfosEntry = _reflection.GeneratedProtocolMessageType('AgentInfosEntry', (_message.Message,), dict( + DESCRIPTOR = _UNITYRLOUTPUTPROTO_AGENTINFOSENTRY, + __module__ = 'mlagents.envs.communicator_objects.unity_rl_output_pb2' + # @@protoc_insertion_point(class_scope:communicator_objects.UnityRLOutputProto.AgentInfosEntry) + )) + , + DESCRIPTOR = _UNITYRLOUTPUTPROTO, + __module__ = 'mlagents.envs.communicator_objects.unity_rl_output_pb2' + # @@protoc_insertion_point(class_scope:communicator_objects.UnityRLOutputProto) + )) +_sym_db.RegisterMessage(UnityRLOutputProto) +_sym_db.RegisterMessage(UnityRLOutputProto.ListAgentInfoProto) +_sym_db.RegisterMessage(UnityRLOutputProto.AgentInfosEntry) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\252\002\034MLAgents.CommunicatorObjects')) +_UNITYRLOUTPUTPROTO_AGENTINFOSENTRY.has_options = True 
+_UNITYRLOUTPUTPROTO_AGENTINFOSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) # @@protoc_insertion_point(module_scope) diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/unity_rl_output_pb2.pyi b/ml-agents-envs/mlagents/envs/communicator_objects/unity_rl_output_pb2.pyi index a75e130d8e..a1e289f0fc 100644 --- a/ml-agents-envs/mlagents/envs/communicator_objects/unity_rl_output_pb2.pyi +++ b/ml-agents-envs/mlagents/envs/communicator_objects/unity_rl_output_pb2.pyi @@ -1,5 +1,9 @@ # @generated by generate_proto_mypy_stubs.py. Do not edit! import sys +from google.protobuf.descriptor import ( + Descriptor as google___protobuf___descriptor___Descriptor, +) + from google.protobuf.internal.containers import ( RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer, ) @@ -8,8 +12,8 @@ from google.protobuf.message import ( Message as google___protobuf___message___Message, ) -from mlagents.envs.communicator_objects.agent_info_proto_pb2 import ( - AgentInfoProto as mlagents___envs___communicator_objects___agent_info_proto_pb2___AgentInfoProto, +from mlagents.envs.communicator_objects.agent_info_pb2 import ( + AgentInfoProto as mlagents___envs___communicator_objects___agent_info_pb2___AgentInfoProto, ) from typing import ( @@ -25,59 +29,69 @@ from typing_extensions import ( ) -class UnityRLOutput(google___protobuf___message___Message): +builtin___bool = bool +builtin___bytes = bytes +builtin___float = float +builtin___int = int + + +class UnityRLOutputProto(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... class ListAgentInfoProto(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... 
@property - def value(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[mlagents___envs___communicator_objects___agent_info_proto_pb2___AgentInfoProto]: ... + def value(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[mlagents___envs___communicator_objects___agent_info_pb2___AgentInfoProto]: ... def __init__(self, - value : typing___Optional[typing___Iterable[mlagents___envs___communicator_objects___agent_info_proto_pb2___AgentInfoProto]] = None, + *, + value : typing___Optional[typing___Iterable[mlagents___envs___communicator_objects___agent_info_pb2___AgentInfoProto]] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> UnityRLOutput.ListAgentInfoProto: ... + def FromString(cls, s: builtin___bytes) -> UnityRLOutputProto.ListAgentInfoProto: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): def ClearField(self, field_name: typing_extensions___Literal[u"value"]) -> None: ... else: - def ClearField(self, field_name: typing_extensions___Literal[b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions___Literal[u"value",b"value"]) -> None: ... class AgentInfosEntry(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... key = ... # type: typing___Text @property - def value(self) -> UnityRLOutput.ListAgentInfoProto: ... + def value(self) -> UnityRLOutputProto.ListAgentInfoProto: ... def __init__(self, + *, key : typing___Optional[typing___Text] = None, - value : typing___Optional[UnityRLOutput.ListAgentInfoProto] = None, + value : typing___Optional[UnityRLOutputProto.ListAgentInfoProto] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> UnityRLOutput.AgentInfosEntry: ... 
+ def FromString(cls, s: builtin___bytes) -> UnityRLOutputProto.AgentInfosEntry: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"value"]) -> bool: ... + def HasField(self, field_name: typing_extensions___Literal[u"value"]) -> builtin___bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"key",u"value"]) -> None: ... else: - def HasField(self, field_name: typing_extensions___Literal[u"value",b"value"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[b"key",b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions___Literal[u"value",b"value"]) -> builtin___bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"key",b"key",u"value",b"value"]) -> None: ... - global_done = ... # type: bool @property - def agentInfos(self) -> typing___MutableMapping[typing___Text, UnityRLOutput.ListAgentInfoProto]: ... + def agentInfos(self) -> typing___MutableMapping[typing___Text, UnityRLOutputProto.ListAgentInfoProto]: ... def __init__(self, - global_done : typing___Optional[bool] = None, - agentInfos : typing___Optional[typing___Mapping[typing___Text, UnityRLOutput.ListAgentInfoProto]] = None, + *, + agentInfos : typing___Optional[typing___Mapping[typing___Text, UnityRLOutputProto.ListAgentInfoProto]] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> UnityRLOutput: ... + def FromString(cls, s: builtin___bytes) -> UnityRLOutputProto: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"agentInfos",u"global_done"]) -> None: ... 
+ def ClearField(self, field_name: typing_extensions___Literal[u"agentInfos"]) -> None: ... else: - def ClearField(self, field_name: typing_extensions___Literal[b"agentInfos",b"global_done"]) -> None: ... + def ClearField(self, field_name: typing_extensions___Literal[u"agentInfos",b"agentInfos"]) -> None: ... diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/unity_to_external_pb2.py b/ml-agents-envs/mlagents/envs/communicator_objects/unity_to_external_pb2.py index d1a8c7f60a..5e18fa5204 100644 --- a/ml-agents-envs/mlagents/envs/communicator_objects/unity_to_external_pb2.py +++ b/ml-agents-envs/mlagents/envs/communicator_objects/unity_to_external_pb2.py @@ -1,66 +1,58 @@ -# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: mlagents/envs/communicator_objects/unity_to_external.proto import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database - +from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -from mlagents.envs.communicator_objects import ( - unity_message_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_unity__message__pb2, -) +from mlagents.envs.communicator_objects import unity_message_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_unity__message__pb2 DESCRIPTOR = _descriptor.FileDescriptor( - name="mlagents/envs/communicator_objects/unity_to_external.proto", - package="communicator_objects", - syntax="proto3", - serialized_options=_b("\252\002\034MLAgents.CommunicatorObjects"), - serialized_pb=_b( - 
'\n:mlagents/envs/communicator_objects/unity_to_external.proto\x12\x14\x63ommunicator_objects\x1a\x36mlagents/envs/communicator_objects/unity_message.proto2g\n\x0fUnityToExternal\x12T\n\x08\x45xchange\x12".communicator_objects.UnityMessage\x1a".communicator_objects.UnityMessage"\x00\x42\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3' - ), - dependencies=[ - mlagents_dot_envs_dot_communicator__objects_dot_unity__message__pb2.DESCRIPTOR - ], -) + name='mlagents/envs/communicator_objects/unity_to_external.proto', + package='communicator_objects', + syntax='proto3', + serialized_pb=_b('\n:mlagents/envs/communicator_objects/unity_to_external.proto\x12\x14\x63ommunicator_objects\x1a\x36mlagents/envs/communicator_objects/unity_message.proto2v\n\x14UnityToExternalProto\x12^\n\x08\x45xchange\x12\'.communicator_objects.UnityMessageProto\x1a\'.communicator_objects.UnityMessageProto\"\x00\x42\x1f\xaa\x02\x1cMLAgents.CommunicatorObjectsb\x06proto3') + , + dependencies=[mlagents_dot_envs_dot_communicator__objects_dot_unity__message__pb2.DESCRIPTOR,]) -_sym_db.RegisterFileDescriptor(DESCRIPTOR) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) -DESCRIPTOR._options = None -_UNITYTOEXTERNAL = _descriptor.ServiceDescriptor( - name="UnityToExternal", - full_name="communicator_objects.UnityToExternal", - file=DESCRIPTOR, +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\252\002\034MLAgents.CommunicatorObjects')) + +_UNITYTOEXTERNALPROTO = _descriptor.ServiceDescriptor( + name='UnityToExternalProto', + full_name='communicator_objects.UnityToExternalProto', + file=DESCRIPTOR, + index=0, + options=None, + serialized_start=140, + serialized_end=258, + methods=[ + _descriptor.MethodDescriptor( + name='Exchange', + full_name='communicator_objects.UnityToExternalProto.Exchange', index=0, - serialized_options=None, - serialized_start=140, - serialized_end=243, - methods=[ - _descriptor.MethodDescriptor( - name="Exchange", - 
full_name="communicator_objects.UnityToExternal.Exchange", - index=0, - containing_service=None, - input_type=mlagents_dot_envs_dot_communicator__objects_dot_unity__message__pb2._UNITYMESSAGE, - output_type=mlagents_dot_envs_dot_communicator__objects_dot_unity__message__pb2._UNITYMESSAGE, - serialized_options=None, - ) - ], -) -_sym_db.RegisterServiceDescriptor(_UNITYTOEXTERNAL) - -DESCRIPTOR.services_by_name["UnityToExternal"] = _UNITYTOEXTERNAL + containing_service=None, + input_type=mlagents_dot_envs_dot_communicator__objects_dot_unity__message__pb2._UNITYMESSAGEPROTO, + output_type=mlagents_dot_envs_dot_communicator__objects_dot_unity__message__pb2._UNITYMESSAGEPROTO, + options=None, + ), +]) +_sym_db.RegisterServiceDescriptor(_UNITYTOEXTERNALPROTO) + +DESCRIPTOR.services_by_name['UnityToExternalProto'] = _UNITYTOEXTERNALPROTO # @@protoc_insertion_point(module_scope) diff --git a/ml-agents-envs/mlagents/envs/communicator_objects/unity_to_external_pb2_grpc.py b/ml-agents-envs/mlagents/envs/communicator_objects/unity_to_external_pb2_grpc.py index 9dafe0dfd2..00e3a7d3e1 100644 --- a/ml-agents-envs/mlagents/envs/communicator_objects/unity_to_external_pb2_grpc.py +++ b/ml-agents-envs/mlagents/envs/communicator_objects/unity_to_external_pb2_grpc.py @@ -1,49 +1,46 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! import grpc -from mlagents.envs.communicator_objects import ( - unity_message_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_unity__message__pb2, -) +from mlagents.envs.communicator_objects import unity_message_pb2 as mlagents_dot_envs_dot_communicator__objects_dot_unity__message__pb2 -class UnityToExternalStub(object): - # missing associated documentation comment in .proto file - pass +class UnityToExternalProtoStub(object): + # missing associated documentation comment in .proto file + pass - def __init__(self, channel): - """Constructor. + def __init__(self, channel): + """Constructor. Args: channel: A grpc.Channel. 
""" - self.Exchange = channel.unary_unary( - "/communicator_objects.UnityToExternal/Exchange", - request_serializer=mlagents_dot_envs_dot_communicator__objects_dot_unity__message__pb2.UnityMessage.SerializeToString, - response_deserializer=mlagents_dot_envs_dot_communicator__objects_dot_unity__message__pb2.UnityMessage.FromString, + self.Exchange = channel.unary_unary( + '/communicator_objects.UnityToExternalProto/Exchange', + request_serializer=mlagents_dot_envs_dot_communicator__objects_dot_unity__message__pb2.UnityMessageProto.SerializeToString, + response_deserializer=mlagents_dot_envs_dot_communicator__objects_dot_unity__message__pb2.UnityMessageProto.FromString, ) -class UnityToExternalServicer(object): - # missing associated documentation comment in .proto file - pass +class UnityToExternalProtoServicer(object): + # missing associated documentation comment in .proto file + pass - def Exchange(self, request, context): - """Sends the academy parameters + def Exchange(self, request, context): + """Sends the academy parameters """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_UnityToExternalServicer_to_server(servicer, server): - rpc_method_handlers = { - "Exchange": grpc.unary_unary_rpc_method_handler( - servicer.Exchange, - request_deserializer=mlagents_dot_envs_dot_communicator__objects_dot_unity__message__pb2.UnityMessage.FromString, - response_serializer=mlagents_dot_envs_dot_communicator__objects_dot_unity__message__pb2.UnityMessage.SerializeToString, - ) - } - generic_handler = grpc.method_handlers_generic_handler( - "communicator_objects.UnityToExternal", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def 
add_UnityToExternalProtoServicer_to_server(servicer, server): + rpc_method_handlers = { + 'Exchange': grpc.unary_unary_rpc_method_handler( + servicer.Exchange, + request_deserializer=mlagents_dot_envs_dot_communicator__objects_dot_unity__message__pb2.UnityMessageProto.FromString, + response_serializer=mlagents_dot_envs_dot_communicator__objects_dot_unity__message__pb2.UnityMessageProto.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'communicator_objects.UnityToExternalProto', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/ml-agents-envs/mlagents/envs/environment.py b/ml-agents-envs/mlagents/envs/environment.py index f7a6522272..52fd8132cd 100644 --- a/ml-agents-envs/mlagents/envs/environment.py +++ b/ml-agents-envs/mlagents/envs/environment.py @@ -16,23 +16,23 @@ UnityTimeOutException, ) -from mlagents.envs.communicator_objects.unity_rl_input_pb2 import UnityRLInput -from mlagents.envs.communicator_objects.unity_rl_output_pb2 import UnityRLOutput -from mlagents.envs.communicator_objects.agent_action_proto_pb2 import AgentActionProto -from mlagents.envs.communicator_objects.environment_parameters_proto_pb2 import ( +from mlagents.envs.communicator_objects.unity_rl_input_pb2 import UnityRLInputProto +from mlagents.envs.communicator_objects.unity_rl_output_pb2 import UnityRLOutputProto +from mlagents.envs.communicator_objects.agent_action_pb2 import AgentActionProto +from mlagents.envs.communicator_objects.environment_parameters_pb2 import ( EnvironmentParametersProto, ) +from mlagents.envs.communicator_objects.unity_output_pb2 import UnityOutputProto from mlagents.envs.communicator_objects.unity_rl_initialization_input_pb2 import ( - UnityRLInitializationInput, + UnityRLInitializationInputProto, ) -from mlagents.envs.communicator_objects.unity_rl_initialization_output_pb2 import ( - UnityRLInitializationOutput, -) -from mlagents.envs.communicator_objects.unity_input_pb2 import 
UnityInput -from mlagents.envs.communicator_objects.custom_action_pb2 import CustomAction + +from mlagents.envs.communicator_objects.unity_input_pb2 import UnityInputProto +from mlagents.envs.communicator_objects.custom_action_pb2 import CustomActionProto from .rpc_communicator import RpcCommunicator from sys import platform +import signal logging.basicConfig(level=logging.INFO) logger = logging.getLogger("mlagents.envs") @@ -42,6 +42,7 @@ class UnityEnvironment(BaseUnityEnvironment): SCALAR_ACTION_TYPES = (int, np.int32, np.int64, float, np.float32, np.float64) SINGLE_BRAIN_ACTION_TYPES = SCALAR_ACTION_TYPES + (list, np.ndarray) SINGLE_BRAIN_TEXT_TYPES = list + API_VERSION = "API-11" def __init__( self, @@ -72,13 +73,14 @@ def __init__( atexit.register(self._close) self.port = base_port + worker_id self._buffer_size = 12000 - self._version_ = "API-10" + self._version_ = UnityEnvironment.API_VERSION self._loaded = ( False ) # If true, this means the environment was successfully loaded self.proc1 = ( None ) # The process that is started. 
If None, no process was started + self.timeout_wait: int = timeout_wait self.communicator = self.get_communicator(worker_id, base_port, timeout_wait) self.worker_id = worker_id @@ -98,9 +100,10 @@ def __init__( ) self._loaded = True - rl_init_parameters_in = UnityRLInitializationInput(seed=seed) + rl_init_parameters_in = UnityRLInitializationInputProto(seed=seed) try: - aca_params = self.send_academy_parameters(rl_init_parameters_in) + aca_output = self.send_academy_parameters(rl_init_parameters_in) + aca_params = aca_output.rl_initialization_output except UnityTimeOutException: self._close() raise @@ -118,26 +121,13 @@ def __init__( self._academy_name = aca_params.name self._log_path = aca_params.log_path self._brains: Dict[str, BrainParameters] = {} - self._brain_names: List[str] = [] self._external_brain_names: List[str] = [] - for brain_param in aca_params.brain_parameters: - self._brain_names += [brain_param.brain_name] - self._brains[brain_param.brain_name] = BrainParameters.from_proto( - brain_param - ) - if brain_param.is_training: - self._external_brain_names += [brain_param.brain_name] - self._num_brains = len(self._brain_names) - self._num_external_brains = len(self._external_brain_names) + self._num_external_brains = 0 + self._update_brain_parameters(aca_output) self._resetParameters = dict(aca_params.environment_parameters.float_parameters) logger.info( "\n'{0}' started successfully!\n{1}".format(self._academy_name, str(self)) ) - if self._num_external_brains == 0: - logger.warning( - " No Learning Brains set to train found in the Unity Environment. " - "You will not be able to pass actions to your agent(s)." 
- ) @property def logfile_path(self): @@ -151,18 +141,10 @@ def brains(self): def academy_name(self): return self._academy_name - @property - def number_brains(self): - return self._num_brains - @property def number_external_brains(self): return self._num_external_brains - @property - def brain_names(self): - return self._brain_names - @property def external_brain_names(self): return self._external_brain_names @@ -249,7 +231,15 @@ def executable_launcher(self, file_name, docker_training, no_graphics, args): subprocess_args += ["--port", str(self.port)] subprocess_args += args try: - self.proc1 = subprocess.Popen(subprocess_args) + self.proc1 = subprocess.Popen( + subprocess_args, + # start_new_session=True means that signals to the parent python process + # (e.g. SIGINT from keyboard interrupt) will not be sent to the new process on POSIX platforms. + # This is generally good since we want the environment to have a chance to shutdown, + # but may be undesirable in come cases; if so, we'll add a command-line toggle. + # Note that on Windows, the CTRL_C signal will still be sent. + start_new_session=True, + ) except PermissionError as perm: # This is likely due to missing read or execute permissions on file. 
raise UnityEnvironmentException( @@ -291,11 +281,9 @@ def executable_launcher(self, file_name, docker_training, no_graphics, args): def __str__(self): return ( """Unity Academy name: {0} - Number of Brains: {1} - Number of Training Brains : {2} - Reset Parameters :\n\t\t{3}""".format( + Number of Training Brains : {1} + Reset Parameters :\n\t\t{2}""".format( self._academy_name, - str(self._num_brains), str(self._num_external_brains), "\n\t\t".join( [ @@ -346,6 +334,7 @@ def reset( ) if outputs is None: raise UnityCommunicationException("Communicator has stopped.") + self._update_brain_parameters(outputs) rl_output = outputs.rl_output s = self._get_state(rl_output) for _b in self._external_brain_names: @@ -392,7 +381,7 @@ def step( elif self._num_external_brains > 1: raise UnityActionException( "You have {0} brains, you need to feed a dictionary of brain names a keys, " - "and vector_actions as values".format(self._num_brains) + "and vector_actions as values".format(self._num_external_brains) ) else: raise UnityActionException( @@ -406,7 +395,7 @@ def step( elif self._num_external_brains > 1: raise UnityActionException( "You have {0} brains, you need to feed a dictionary of brain names as keys " - "and memories as values".format(self._num_brains) + "and memories as values".format(self._num_external_brains) ) else: raise UnityActionException( @@ -420,7 +409,7 @@ def step( elif self._num_external_brains > 1: raise UnityActionException( "You have {0} brains, you need to feed a dictionary of brain names as keys " - "and text_actions as values".format(self._num_brains) + "and text_actions as values".format(self._num_external_brains) ) else: raise UnityActionException( @@ -435,7 +424,7 @@ def step( raise UnityActionException( "You have {0} brains, you need to feed a dictionary of brain names as keys " "and state/action value estimates as values".format( - self._num_brains + self._num_external_brains ) ) else: @@ -444,13 +433,15 @@ def step( "step cannot take a value 
input" ) - if isinstance(custom_action, CustomAction): + if isinstance(custom_action, CustomActionProto): if self._num_external_brains == 1: custom_action = {self._external_brain_names[0]: custom_action} elif self._num_external_brains > 1: raise UnityActionException( "You have {0} brains, you need to feed a dictionary of brain names as keys " - "and CustomAction instances as values".format(self._num_brains) + "and CustomAction instances as values".format( + self._num_external_brains + ) ) else: raise UnityActionException( @@ -503,7 +494,7 @@ def step( else: if custom_action[brain_name] is None: custom_action[brain_name] = [None] * n_agent - if isinstance(custom_action[brain_name], CustomAction): + if isinstance(custom_action[brain_name], CustomActionProto): custom_action[brain_name] = [ custom_action[brain_name] ] * n_agent @@ -564,6 +555,7 @@ def step( outputs = self.communicator.exchange(step_input) if outputs is None: raise UnityCommunicationException("Communicator has stopped.") + self._update_brain_parameters(outputs) rl_output = outputs.rl_output state = self._get_state(rl_output) for _b in self._external_brain_names: @@ -583,7 +575,18 @@ def _close(self): self._loaded = False self.communicator.close() if self.proc1 is not None: - self.proc1.kill() + # Wait a bit for the process to shutdown, but kill it if it takes too long + try: + self.proc1.wait(timeout=self.timeout_wait) + signal_name = self.returncode_to_signal_name(self.proc1.returncode) + signal_name = f" ({signal_name})" if signal_name else "" + return_info = f"Environment shut down with return code {self.proc1.returncode}{signal_name}." + logger.info(return_info) + except subprocess.TimeoutExpired: + logger.info("Environment timed out shutting down. Killing...") + self.proc1.kill() + # Set to None so we don't try to close multiple times. 
+ self.proc1 = None @classmethod def _flatten(cls, arr: Any) -> List[float]: @@ -605,7 +608,7 @@ def _flatten(cls, arr: Any) -> List[float]: arr = [float(x) for x in arr] return arr - def _get_state(self, output: UnityRLOutput) -> AllBrainInfo: + def _get_state(self, output: UnityRLOutputProto) -> AllBrainInfo: """ Collects experience information from all external brains in environment at current step. :return: a dictionary of BrainInfo objects. @@ -618,6 +621,21 @@ def _get_state(self, output: UnityRLOutput) -> AllBrainInfo: ) return _data + def _update_brain_parameters(self, output: UnityOutputProto) -> None: + init_output = output.rl_initialization_output + + for brain_param in init_output.brain_parameters: + # Each BrainParameter in the rl_initialization_output should have at least one AgentInfo + # Get that agent, because we need some of its observations. + agent_infos = output.rl_output.agentInfos[brain_param.brain_name] + if agent_infos.value: + agent = agent_infos.value[0] + self._brains[brain_param.brain_name] = BrainParameters.from_proto( + brain_param, agent + ) + self._external_brain_names = list(self._brains.keys()) + self._num_external_brains = len(self._external_brain_names) + @timed def _generate_step_input( self, @@ -626,8 +644,8 @@ def _generate_step_input( text_action: Dict[str, list], value: Dict[str, np.ndarray], custom_action: Dict[str, list], - ) -> UnityInput: - rl_in = UnityRLInput() + ) -> UnityInputProto: + rl_in = UnityRLInputProto() for b in vector_action: n_agents = self._n_agents[b] if n_agents == 0: @@ -650,8 +668,8 @@ def _generate_step_input( def _generate_reset_input( self, training: bool, config: Dict, custom_reset_parameters: Any - ) -> UnityInput: - rl_in = UnityRLInput() + ) -> UnityInputProto: + rl_in = UnityRLInputProto() rl_in.is_training = training rl_in.environment_parameters.CopyFrom(EnvironmentParametersProto()) for key in config: @@ -664,14 +682,28 @@ def _generate_reset_input( return self.wrap_unity_input(rl_in) def 
send_academy_parameters( - self, init_parameters: UnityRLInitializationInput - ) -> UnityRLInitializationOutput: - inputs = UnityInput() + self, init_parameters: UnityRLInitializationInputProto + ) -> UnityOutputProto: + inputs = UnityInputProto() inputs.rl_initialization_input.CopyFrom(init_parameters) - return self.communicator.initialize(inputs).rl_initialization_output + return self.communicator.initialize(inputs) @staticmethod - def wrap_unity_input(rl_input: UnityRLInput) -> UnityInput: - result = UnityInput() + def wrap_unity_input(rl_input: UnityRLInputProto) -> UnityInputProto: + result = UnityInputProto() result.rl_input.CopyFrom(rl_input) return result + + @staticmethod + def returncode_to_signal_name(returncode: int) -> Optional[str]: + """ + Try to convert return codes into their corresponding signal name. + E.g. returncode_to_signal_name(-2) -> "SIGINT" + """ + try: + # A negative value -N indicates that the child was terminated by signal N (POSIX only). + s = signal.Signals(-returncode) + return s.name + except Exception: + # Should generally be a ValueError, but catch everything just in case. 
+ return None diff --git a/ml-agents-envs/mlagents/envs/mock_communicator.py b/ml-agents-envs/mlagents/envs/mock_communicator.py index 3c621cf667..398af545db 100755 --- a/ml-agents-envs/mlagents/envs/mock_communicator.py +++ b/ml-agents-envs/mlagents/envs/mock_communicator.py @@ -1,15 +1,17 @@ from .communicator import Communicator -from mlagents.envs.communicator_objects.unity_rl_output_pb2 import UnityRLOutput -from mlagents.envs.communicator_objects.brain_parameters_proto_pb2 import ( - BrainParametersProto, -) +from .environment import UnityEnvironment +from mlagents.envs.communicator_objects.unity_rl_output_pb2 import UnityRLOutputProto +from mlagents.envs.communicator_objects.brain_parameters_pb2 import BrainParametersProto from mlagents.envs.communicator_objects.unity_rl_initialization_output_pb2 import ( - UnityRLInitializationOutput, + UnityRLInitializationOutputProto, +) +from mlagents.envs.communicator_objects.unity_input_pb2 import UnityInputProto +from mlagents.envs.communicator_objects.unity_output_pb2 import UnityOutputProto +from mlagents.envs.communicator_objects.agent_info_pb2 import AgentInfoProto +from mlagents.envs.communicator_objects.compressed_observation_pb2 import ( + CompressedObservationProto, + CompressionTypeProto, ) -from mlagents.envs.communicator_objects.unity_input_pb2 import UnityInput -from mlagents.envs.communicator_objects.unity_output_pb2 import UnityOutput -from mlagents.envs.communicator_objects.resolution_proto_pb2 import ResolutionProto -from mlagents.envs.communicator_objects.agent_info_proto_pb2 import AgentInfoProto class MockCommunicator(Communicator): @@ -40,27 +42,26 @@ def __init__( else: self.num_stacks = 1 - def initialize(self, inputs: UnityInput) -> UnityOutput: - resolutions = [ - ResolutionProto(width=30, height=40, gray_scale=False) - for i in range(self.visual_inputs) - ] + def initialize(self, inputs: UnityInputProto) -> UnityOutputProto: bp = BrainParametersProto( vector_observation_size=self.vec_obs_size, 
num_stacked_vector_observations=self.num_stacks, vector_action_size=[2], - camera_resolutions=resolutions, vector_action_descriptions=["", ""], vector_action_space_type=int(not self.is_discrete), brain_name=self.brain_name, is_training=True, ) - rl_init = UnityRLInitializationOutput( - name="RealFakeAcademy", version="API-10", log_path="", brain_parameters=[bp] + rl_init = UnityRLInitializationOutputProto( + name="RealFakeAcademy", + version=UnityEnvironment.API_VERSION, + log_path="", + brain_parameters=[bp], ) - return UnityOutput(rl_initialization_output=rl_init) + output = UnityRLOutputProto(agentInfos=self._get_agent_infos()) + return UnityOutputProto(rl_initialization_output=rl_init, rl_output=output) - def exchange(self, inputs: UnityInput) -> UnityOutput: + def _get_agent_infos(self): dict_agent_info = {} if self.is_discrete: vector_action = [1] @@ -72,6 +73,13 @@ def exchange(self, inputs: UnityInput) -> UnityOutput: else: observation = [1, 2, 3, 1, 2, 3] + compressed_obs = [ + CompressedObservationProto( + data=None, shape=[30, 40, 3], compression_type=CompressionTypeProto.PNG + ) + for _ in range(self.visual_inputs) + ] + for i in range(self.num_agents): list_agent_info.append( AgentInfoProto( @@ -84,13 +92,17 @@ def exchange(self, inputs: UnityInput) -> UnityOutput: done=(i == 2), max_step_reached=False, id=i, + compressed_observations=compressed_obs, ) ) - dict_agent_info["RealFakeBrain"] = UnityRLOutput.ListAgentInfoProto( + dict_agent_info["RealFakeBrain"] = UnityRLOutputProto.ListAgentInfoProto( value=list_agent_info ) - result = UnityRLOutput(agentInfos=dict_agent_info) - return UnityOutput(rl_output=result) + return dict_agent_info + + def exchange(self, inputs: UnityInputProto) -> UnityOutputProto: + result = UnityRLOutputProto(agentInfos=self._get_agent_infos()) + return UnityOutputProto(rl_output=result) def close(self): """ diff --git a/ml-agents-envs/mlagents/envs/rpc_communicator.py b/ml-agents-envs/mlagents/envs/rpc_communicator.py index 
66ea786a1a..9795d98e34 100644 --- a/ml-agents-envs/mlagents/envs/rpc_communicator.py +++ b/ml-agents-envs/mlagents/envs/rpc_communicator.py @@ -8,18 +8,18 @@ from .communicator import Communicator from mlagents.envs.communicator_objects.unity_to_external_pb2_grpc import ( - UnityToExternalServicer, - add_UnityToExternalServicer_to_server, + UnityToExternalProtoServicer, + add_UnityToExternalProtoServicer_to_server, ) -from mlagents.envs.communicator_objects.unity_message_pb2 import UnityMessage -from mlagents.envs.communicator_objects.unity_input_pb2 import UnityInput -from mlagents.envs.communicator_objects.unity_output_pb2 import UnityOutput +from mlagents.envs.communicator_objects.unity_message_pb2 import UnityMessageProto +from mlagents.envs.communicator_objects.unity_input_pb2 import UnityInputProto +from mlagents.envs.communicator_objects.unity_output_pb2 import UnityOutputProto from .exception import UnityTimeOutException, UnityWorkerInUseException logger = logging.getLogger("mlagents.envs") -class UnityToExternalServicerImplementation(UnityToExternalServicer): +class UnityToExternalServicerImplementation(UnityToExternalProtoServicer): def __init__(self): self.parent_conn, self.child_conn = Pipe() @@ -59,7 +59,9 @@ def create_server(self): # Establish communication grpc self.server = grpc.server(ThreadPoolExecutor(max_workers=10)) self.unity_to_external = UnityToExternalServicerImplementation() - add_UnityToExternalServicer_to_server(self.unity_to_external, self.server) + add_UnityToExternalProtoServicer_to_server( + self.unity_to_external, self.server + ) # Using unspecified address, which means that grpc is communicating on all IPs # This is so that the docker container can connect. 
self.server.add_insecure_port("[::]:" + str(self.port)) @@ -80,25 +82,24 @@ def check_port(self, port): finally: s.close() - def initialize(self, inputs: UnityInput) -> UnityOutput: + def initialize(self, inputs: UnityInputProto) -> UnityOutputProto: if not self.unity_to_external.parent_conn.poll(self.timeout_wait): raise UnityTimeOutException( "The Unity environment took too long to respond. Make sure that :\n" "\t The environment does not need user interaction to launch\n" - "\t The Academy's Broadcast Hub is configured correctly\n" "\t The Agents are linked to the appropriate Brains\n" "\t The environment and the Python interface have compatible versions." ) aca_param = self.unity_to_external.parent_conn.recv().unity_output - message = UnityMessage() + message = UnityMessageProto() message.header.status = 200 message.unity_input.CopyFrom(inputs) self.unity_to_external.parent_conn.send(message) self.unity_to_external.parent_conn.recv() return aca_param - def exchange(self, inputs: UnityInput) -> Optional[UnityOutput]: - message = UnityMessage() + def exchange(self, inputs: UnityInputProto) -> Optional[UnityOutputProto]: + message = UnityMessageProto() message.header.status = 200 message.unity_input.CopyFrom(inputs) self.unity_to_external.parent_conn.send(message) @@ -112,7 +113,7 @@ def close(self): Sends a shutdown signal to the unity environment, and closes the grpc connection. 
""" if self.is_open: - message_input = UnityMessage() + message_input = UnityMessageProto() message_input.header.status = 400 self.unity_to_external.parent_conn.send(message_input) self.unity_to_external.parent_conn.close() diff --git a/ml-agents-envs/mlagents/envs/socket_communicator.py b/ml-agents-envs/mlagents/envs/socket_communicator.py deleted file mode 100644 index 7eee5e2d8d..0000000000 --- a/ml-agents-envs/mlagents/envs/socket_communicator.py +++ /dev/null @@ -1,102 +0,0 @@ -import logging -import socket -import struct -from typing import Optional - -from .communicator import Communicator -from mlagents.envs.communicator_objects.unity_message_pb2 import UnityMessage -from mlagents.envs.communicator_objects.unity_output_pb2 import UnityOutput -from mlagents.envs.communicator_objects.unity_input_pb2 import UnityInput -from .exception import UnityTimeOutException - - -logger = logging.getLogger("mlagents.envs") - - -class SocketCommunicator(Communicator): - def __init__(self, worker_id=0, base_port=5005): - """ - Python side of the socket communication - - :int base_port: Baseline port number to connect to Unity environment over. worker_id increments over this. - :int worker_id: Number to add to communication port (5005) [0]. Used for asynchronous agent scenarios. - """ - - self.port = base_port + worker_id - self._buffer_size = 12000 - self.worker_id = worker_id - self._socket = None - self._conn = None - - def initialize(self, inputs: UnityInput) -> UnityOutput: - try: - # Establish communication socket - self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - self._socket.bind(("localhost", self.port)) - except Exception: - raise UnityTimeOutException( - "Couldn't start socket communication because worker number {} is still in use. 
" - "You may need to manually close a previously opened environment " - "or use a different worker number.".format(str(self.worker_id)) - ) - try: - self._socket.settimeout(30) - self._socket.listen(1) - self._conn, _ = self._socket.accept() - self._conn.settimeout(30) - except Exception: - raise UnityTimeOutException( - "The Unity environment took too long to respond. Make sure that :\n" - "\t The environment does not need user interaction to launch\n" - "\t The Academy's Broadcast Hub is configured correctly\n" - "\t The Agents are linked to the appropriate Brains\n" - "\t The environment and the Python interface have compatible versions." - ) - message = UnityMessage() - message.header.status = 200 - message.unity_input.CopyFrom(inputs) - self._communicator_send(message.SerializeToString()) - initialization_output = UnityMessage() - initialization_output.ParseFromString(self._communicator_receive()) - return initialization_output.unity_output - - def _communicator_receive(self): - try: - s = self._conn.recv(self._buffer_size) - message_length = struct.unpack("I", bytearray(s[:4]))[0] - s = s[4:] - while len(s) != message_length: - s += self._conn.recv(self._buffer_size) - except socket.timeout: - raise UnityTimeOutException("The environment took too long to respond.") - return s - - def _communicator_send(self, message): - self._conn.send(struct.pack("I", len(message)) + message) - - def exchange(self, inputs: UnityInput) -> Optional[UnityOutput]: - message = UnityMessage() - message.header.status = 200 - message.unity_input.CopyFrom(inputs) - self._communicator_send(message.SerializeToString()) - outputs = UnityMessage() - outputs.ParseFromString(self._communicator_receive()) - if outputs.header.status != 200: - return None - return outputs.unity_output - - def close(self): - """ - Sends a shutdown signal to the unity environment, and closes the socket connection. 
- """ - if self._socket is not None and self._conn is not None: - message_input = UnityMessage() - message_input.header.status = 400 - self._communicator_send(message_input.SerializeToString()) - if self._socket is not None: - self._socket.close() - self._socket = None - if self._socket is not None: - self._conn.close() - self._conn = None diff --git a/ml-agents-envs/mlagents/envs/subprocess_env_manager.py b/ml-agents-envs/mlagents/envs/subprocess_env_manager.py index 666d39436b..a91a49650c 100644 --- a/ml-agents-envs/mlagents/envs/subprocess_env_manager.py +++ b/ml-agents-envs/mlagents/envs/subprocess_env_manager.py @@ -119,18 +119,18 @@ def _send_response(cmd_name, payload): elif cmd.name == "close": break except (KeyboardInterrupt, UnityCommunicationException): - print("UnityEnvironment worker: environment stopping.") + logger.info(f"UnityEnvironment worker {worker_id}: environment stopping.") step_queue.put(EnvironmentResponse("env_close", worker_id, None)) finally: # If this worker has put an item in the step queue that hasn't been processed by the EnvManager, the process # will hang until the item is processed. We avoid this behavior by using Queue.cancel_join_thread() # See https://docs.python.org/3/library/multiprocessing.html#multiprocessing.Queue.cancel_join_thread for # more info. 
- logger.debug(f"Worker {worker_id} closing.") + logger.debug(f"UnityEnvironment worker {worker_id} closing.") step_queue.cancel_join_thread() step_queue.close() env.close() - logger.debug(f"Worker {worker_id} done.") + logger.debug(f"UnityEnvironment worker {worker_id} done.") class SubprocessEnvManager(EnvManager): @@ -202,7 +202,7 @@ def reset( train_mode: bool = True, custom_reset_parameters: Any = None, ) -> List[EnvironmentStep]: - while any([ew.waiting for ew in self.env_workers]): + while any(ew.waiting for ew in self.env_workers): if not self.step_queue.empty(): step = self.step_queue.get_nowait() self.env_workers[step.worker_id].waiting = False diff --git a/ml-agents-envs/mlagents/envs/tests/test_brain.py b/ml-agents-envs/mlagents/envs/tests/test_brain.py new file mode 100644 index 0000000000..73b7b23954 --- /dev/null +++ b/ml-agents-envs/mlagents/envs/tests/test_brain.py @@ -0,0 +1,62 @@ +import logging +import numpy as np +import sys +from unittest import mock + +from mlagents.envs.communicator_objects.agent_info_pb2 import AgentInfoProto +from mlagents.envs.brain import BrainInfo, BrainParameters + +test_brain = BrainParameters( + brain_name="test_brain", + vector_observation_space_size=3, + num_stacked_vector_observations=1, + camera_resolutions=[], + vector_action_space_size=[], + vector_action_descriptions=[], + vector_action_space_type=1, +) + + +@mock.patch.object(np, "nan_to_num", wraps=np.nan_to_num) +@mock.patch.object(logging.Logger, "warning") +def test_from_agent_proto_nan(mock_warning, mock_nan_to_num): + agent_info_proto = AgentInfoProto() + agent_info_proto.stacked_vector_observation.extend([1.0, 2.0, float("nan")]) + + brain_info = BrainInfo.from_agent_proto(1, [agent_info_proto], test_brain) + # nan gets set to 0.0 + expected = [1.0, 2.0, 0.0] + assert (brain_info.vector_observations == expected).all() + mock_nan_to_num.assert_called() + mock_warning.assert_called() + + +@mock.patch.object(np, "nan_to_num", wraps=np.nan_to_num) 
+@mock.patch.object(logging.Logger, "warning") +def test_from_agent_proto_inf(mock_warning, mock_nan_to_num): + agent_info_proto = AgentInfoProto() + agent_info_proto.stacked_vector_observation.extend([1.0, float("inf"), 0.0]) + + brain_info = BrainInfo.from_agent_proto(1, [agent_info_proto], test_brain) + # inf should get set to float_max + expected = [1.0, sys.float_info.max, 0.0] + assert (brain_info.vector_observations == expected).all() + mock_nan_to_num.assert_called() + # We don't warn on inf, just NaN + mock_warning.assert_not_called() + + +@mock.patch.object(np, "nan_to_num", wraps=np.nan_to_num) +@mock.patch.object(logging.Logger, "warning") +def test_from_agent_proto_fast_path(mock_warning, mock_nan_to_num): + """ + Check that all finite values skips the nan_to_num call + """ + agent_info_proto = AgentInfoProto() + agent_info_proto.stacked_vector_observation.extend([1.0, 2.0, 3.0]) + + brain_info = BrainInfo.from_agent_proto(1, [agent_info_proto], test_brain) + expected = [1.0, 2.0, 3.0] + assert (brain_info.vector_observations == expected).all() + mock_nan_to_num.assert_not_called() + mock_warning.assert_not_called() diff --git a/ml-agents-envs/mlagents/envs/tests/test_envs.py b/ml-agents-envs/mlagents/envs/tests/test_envs.py index b9b57b0ba4..1827937862 100755 --- a/ml-agents-envs/mlagents/envs/tests/test_envs.py +++ b/ml-agents-envs/mlagents/envs/tests/test_envs.py @@ -22,7 +22,7 @@ def test_initialization(mock_communicator, mock_launcher): discrete_action=False, visual_inputs=0 ) env = UnityEnvironment(" ") - assert env.brain_names[0] == "RealFakeBrain" + assert env.external_brain_names[0] == "RealFakeBrain" env.close() @@ -108,5 +108,11 @@ def test_close(mock_communicator, mock_launcher): assert comm.has_been_closed +def test_returncode_to_signal_name(): + assert UnityEnvironment.returncode_to_signal_name(-2) == "SIGINT" + assert UnityEnvironment.returncode_to_signal_name(42) is None + assert UnityEnvironment.returncode_to_signal_name("SIGINT") is 
None + + if __name__ == "__main__": pytest.main() diff --git a/ml-agents-envs/setup.py b/ml-agents-envs/setup.py index b9fe857023..f2021aee65 100644 --- a/ml-agents-envs/setup.py +++ b/ml-agents-envs/setup.py @@ -1,11 +1,34 @@ +import os +import sys from setuptools import setup -from os import path +from setuptools.command.install import install + +VERSION = "0.11.0" + +here = os.path.abspath(os.path.dirname(__file__)) + + +class VerifyVersionCommand(install): + """ + Custom command to verify that the git tag matches our version + See https://circleci.com/blog/continuously-deploying-python-packages-to-pypi-with-circleci/ + """ + + description = "verify that the git tag matches our version" + + def run(self): + tag = os.getenv("CIRCLE_TAG") + + if tag != VERSION: + info = "Git tag: {0} does not match the version of this app: {1}".format( + tag, VERSION + ) + sys.exit(info) -here = path.abspath(path.dirname(__file__)) setup( name="mlagents_envs", - version="0.10.1", + version=VERSION, description="Unity Machine Learning Agents Interface", url="https://github.com/Unity-Technologies/ml-agents", author="Unity Technologies", @@ -27,4 +50,5 @@ "protobuf>=3.6", ], python_requires=">=3.5", + cmdclass={"verify": VerifyVersionCommand}, ) diff --git a/ml-agents/mlagents/trainers/bc/online_trainer.py b/ml-agents/mlagents/trainers/bc/online_trainer.py deleted file mode 100644 index 73d0e14664..0000000000 --- a/ml-agents/mlagents/trainers/bc/online_trainer.py +++ /dev/null @@ -1,149 +0,0 @@ -# # Unity ML-Agents Toolkit -# ## ML-Agent Learning (Behavioral Cloning) -# Contains an implementation of Behavioral Cloning Algorithm - -import logging -import numpy as np - -from mlagents.envs.brain import AllBrainInfo -from mlagents.envs.action_info import ActionInfoOutputs -from mlagents.trainers.bc.trainer import BCTrainer - -logger = logging.getLogger("mlagents.trainers") - - -class OnlineBCTrainer(BCTrainer): - """The OnlineBCTrainer is an implementation of Online Behavioral 
Cloning.""" - - def __init__(self, brain, trainer_parameters, training, load, seed, run_id): - """ - Responsible for collecting experiences and training PPO model. - :param trainer_parameters: The parameters for the trainer (dictionary). - :param training: Whether the trainer is set for training. - :param load: Whether the model should be loaded. - :param seed: The seed the model will be initialized with - :param run_id: The identifier of the current run - """ - super(OnlineBCTrainer, self).__init__( - brain, trainer_parameters, training, load, seed, run_id - ) - - self.param_keys = [ - "brain_to_imitate", - "batch_size", - "time_horizon", - "summary_freq", - "max_steps", - "batches_per_epoch", - "use_recurrent", - "hidden_units", - "learning_rate", - "num_layers", - "sequence_length", - "memory_size", - "model_path", - ] - - self.check_param_keys() - self.brain_to_imitate = trainer_parameters["brain_to_imitate"] - self.batches_per_epoch = trainer_parameters["batches_per_epoch"] - self.n_sequences = max( - int(trainer_parameters["batch_size"] / self.policy.sequence_length), 1 - ) - - def add_experiences( - self, - curr_info: AllBrainInfo, - next_info: AllBrainInfo, - take_action_outputs: ActionInfoOutputs, - ) -> None: - """ - Adds experiences to each agent's experience history. - :param curr_info: Current AllBrainInfo (Dictionary of all current brains and corresponding BrainInfo). - :param next_info: Next AllBrainInfo (Dictionary of all current brains and corresponding BrainInfo). - :param take_action_outputs: The outputs of the take action method. 
- """ - - # Used to collect teacher experience into training buffer - info_teacher = curr_info[self.brain_to_imitate] - next_info_teacher = next_info[self.brain_to_imitate] - for agent_id in info_teacher.agents: - self.demonstration_buffer[agent_id].last_brain_info = info_teacher - - for agent_id in next_info_teacher.agents: - stored_info_teacher = self.demonstration_buffer[agent_id].last_brain_info - if stored_info_teacher is None: - continue - else: - idx = stored_info_teacher.agents.index(agent_id) - next_idx = next_info_teacher.agents.index(agent_id) - if stored_info_teacher.text_observations[idx] != "": - info_teacher_record, info_teacher_reset = ( - stored_info_teacher.text_observations[idx].lower().split(",") - ) - next_info_teacher_record, next_info_teacher_reset = ( - next_info_teacher.text_observations[idx].lower().split(",") - ) - if next_info_teacher_reset == "true": - self.demonstration_buffer.reset_update_buffer() - else: - info_teacher_record, next_info_teacher_record = "true", "true" - if info_teacher_record == "true" and next_info_teacher_record == "true": - if not stored_info_teacher.local_done[idx]: - for i in range(self.policy.vis_obs_size): - self.demonstration_buffer[agent_id][ - "visual_obs%d" % i - ].append(stored_info_teacher.visual_observations[i][idx]) - if self.policy.use_vec_obs: - self.demonstration_buffer[agent_id]["vector_obs"].append( - stored_info_teacher.vector_observations[idx] - ) - if self.policy.use_recurrent: - if stored_info_teacher.memories.shape[1] == 0: - stored_info_teacher.memories = np.zeros( - ( - len(stored_info_teacher.agents), - self.policy.m_size, - ) - ) - self.demonstration_buffer[agent_id]["memory"].append( - stored_info_teacher.memories[idx] - ) - self.demonstration_buffer[agent_id]["actions"].append( - next_info_teacher.previous_vector_actions[next_idx] - ) - - super(OnlineBCTrainer, self).add_experiences( - curr_info, next_info, take_action_outputs - ) - - def process_experiences( - self, current_info: 
AllBrainInfo, next_info: AllBrainInfo - ) -> None: - """ - Checks agent histories for processing condition, and processes them as necessary. - Processing involves calculating value and advantage targets for model updating step. - :param current_info: Current AllBrainInfo - :param next_info: Next AllBrainInfo - """ - info_teacher = next_info[self.brain_to_imitate] - for l in range(len(info_teacher.agents)): - teacher_action_list = len( - self.demonstration_buffer[info_teacher.agents[l]]["actions"] - ) - horizon_reached = ( - teacher_action_list > self.trainer_parameters["time_horizon"] - ) - teacher_filled = ( - len(self.demonstration_buffer[info_teacher.agents[l]]["actions"]) > 0 - ) - if (info_teacher.local_done[l] or horizon_reached) and teacher_filled: - agent_id = info_teacher.agents[l] - self.demonstration_buffer.append_update_buffer( - agent_id, - batch_size=None, - training_length=self.policy.sequence_length, - ) - self.demonstration_buffer[agent_id].reset_agent() - - super(OnlineBCTrainer, self).process_experiences(current_info, next_info) diff --git a/ml-agents/mlagents/trainers/components/reward_signals/curiosity/signal.py b/ml-agents/mlagents/trainers/components/reward_signals/curiosity/signal.py index 6a98ac1144..52542fc0dd 100644 --- a/ml-agents/mlagents/trainers/components/reward_signals/curiosity/signal.py +++ b/ml-agents/mlagents/trainers/components/reward_signals/curiosity/signal.py @@ -125,7 +125,7 @@ def prepare_update( policy_model.mask_input: mini_batch["masks"], } if self.policy.use_continuous_act: - feed_dict[policy_model.output_pre] = mini_batch["actions_pre"] + feed_dict[policy_model.selected_actions] = mini_batch["actions"] else: feed_dict[policy_model.action_holder] = mini_batch["actions"] if self.policy.use_vec_obs: diff --git a/ml-agents/mlagents/trainers/demo_loader.py b/ml-agents/mlagents/trainers/demo_loader.py index 14aa006bb7..afba4941ec 100644 --- a/ml-agents/mlagents/trainers/demo_loader.py +++ 
b/ml-agents/mlagents/trainers/demo_loader.py @@ -4,11 +4,9 @@ from typing import List, Tuple from mlagents.trainers.buffer import Buffer from mlagents.envs.brain import BrainParameters, BrainInfo -from mlagents.envs.communicator_objects.agent_info_proto_pb2 import AgentInfoProto -from mlagents.envs.communicator_objects.brain_parameters_proto_pb2 import ( - BrainParametersProto, -) -from mlagents.envs.communicator_objects.demonstration_meta_proto_pb2 import ( +from mlagents.envs.communicator_objects.agent_info_pb2 import AgentInfoProto +from mlagents.envs.communicator_objects.brain_parameters_pb2 import BrainParametersProto +from mlagents.envs.communicator_objects.demonstration_meta_pb2 import ( DemonstrationMetaProto, ) from google.protobuf.internal.decoder import _DecodeVarint32 # type: ignore @@ -98,6 +96,7 @@ def load_demonstration(file_path: str) -> Tuple[BrainParameters, List[BrainInfo] ) brain_params = None + brain_param_proto = None brain_infos = [] total_expected = 0 for _file_path in file_paths: @@ -113,11 +112,15 @@ def load_demonstration(file_path: str) -> Tuple[BrainParameters, List[BrainInfo] if obs_decoded == 1: brain_param_proto = BrainParametersProto() brain_param_proto.ParseFromString(data[pos : pos + next_pos]) - brain_params = BrainParameters.from_proto(brain_param_proto) + pos += next_pos if obs_decoded > 1: agent_info = AgentInfoProto() agent_info.ParseFromString(data[pos : pos + next_pos]) + if brain_params is None: + brain_params = BrainParameters.from_proto( + brain_param_proto, agent_info + ) brain_info = BrainInfo.from_agent_proto(0, [agent_info], brain_params) brain_infos.append(brain_info) if len(brain_infos) == total_expected: diff --git a/ml-agents/mlagents/trainers/learn.py b/ml-agents/mlagents/trainers/learn.py index 3376462779..5dc0d709c7 100644 --- a/ml-agents/mlagents/trainers/learn.py +++ b/ml-agents/mlagents/trainers/learn.py @@ -1,5 +1,4 @@ # # Unity ML-Agents Toolkit - import logging import argparse @@ -14,8 +13,8 @@ from 
mlagents.trainers.trainer_controller import TrainerController from mlagents.trainers.exception import TrainerError -from mlagents.trainers.meta_curriculum import MetaCurriculumError, MetaCurriculum -from mlagents.trainers.trainer_util import initialize_trainers, load_config +from mlagents.trainers.meta_curriculum import MetaCurriculum +from mlagents.trainers.trainer_util import load_config, TrainerFactory from mlagents.envs.environment import UnityEnvironment from mlagents.envs.sampler_class import SamplerManager from mlagents.envs.exception import SamplerException @@ -44,6 +43,7 @@ class CommandLineOptions(NamedTuple): sampler_file_path: Optional[str] docker_target_name: Optional[str] env_args: Optional[List[str]] + cpu: bool @property def fast_simulation(self) -> bool: @@ -156,6 +156,9 @@ def parse_command_line(argv: Optional[List[str]] = None) -> CommandLineOptions: nargs=argparse.REMAINDER, help="Arguments passed to the Unity executable.", ) + parser.add_argument( + "--cpu", default=False, action="store_true", help="Run with CPU only" + ) args = parser.parse_args(argv) return CommandLineOptions.from_argparse(args) @@ -173,10 +176,8 @@ def run_training( :param run_options: Command line arguments for training. 
""" # Docker Parameters - trainer_config_path = options.trainer_config_path curriculum_folder = options.curriculum_folder - # Recognize and use docker volume if one is passed as an argument if not options.docker_target_name: model_path = "./models/{run_id}-{sub_id}".format( @@ -201,14 +202,16 @@ def run_training( summaries_dir = "/{docker_target_name}/summaries".format( docker_target_name=options.docker_target_name ) - trainer_config = load_config(trainer_config_path) + port = options.base_port + (sub_id * options.num_envs) + if options.env_path is None: + port = 5004 # This is the in Editor Training Port env_factory = create_environment_factory( options.env_path, options.docker_target_name, options.no_graphics, run_seed, - options.base_port + (sub_id * options.num_envs), + port, options.env_args, ) env = SubprocessEnvManager(env_factory, options.num_envs) @@ -218,10 +221,8 @@ def run_training( sampler_manager, resampling_interval = create_sampler_manager( options.sampler_file_path, env.reset_parameters, run_seed ) - - trainers = initialize_trainers( + trainer_factory = TrainerFactory( trainer_config, - env.external_brains, summaries_dir, options.run_id, model_path, @@ -232,10 +233,9 @@ def run_training( maybe_meta_curriculum, options.multi_gpu, ) - # Create controller and begin training. tc = TrainerController( - trainers, + trainer_factory, model_path, summaries_dir, options.run_id + "-" + str(sub_id), @@ -247,10 +247,8 @@ def run_training( sampler_manager, resampling_interval, ) - # Signal that environment has been launched. process_queue.put(True) - # Begin training tc.start_learning(env) @@ -268,11 +266,13 @@ def create_sampler_manager(sampler_file_path, env_reset_params, run_seed=None): "Specified resampling-interval is not valid. Please provide" " a positive integer value for resampling-interval" ) + else: raise SamplerException( "Resampling interval was not specified in the sampler file." 
" Please specify it with the 'resampling-interval' key in the sampler config file." ) + sampler_manager = SamplerManager(sampler_config, run_seed) return sampler_manager, resample_interval @@ -282,22 +282,13 @@ def try_create_meta_curriculum( ) -> Optional[MetaCurriculum]: if curriculum_folder is None: return None + else: meta_curriculum = MetaCurriculum(curriculum_folder, env.reset_parameters) # TODO: Should be able to start learning at different lesson numbers # for each curriculum. meta_curriculum.set_all_curriculums_to_lesson_num(lesson) - for brain_name in meta_curriculum.brains_to_curriculums.keys(): - if brain_name not in env.external_brains.keys(): - raise MetaCurriculumError( - "One of the curricula " - "defined in " + curriculum_folder + " " - "does not have a corresponding " - "Brain. Check that the " - "curriculum file has the same " - "name as the Brain " - "whose curriculum it defines." - ) + return meta_curriculum @@ -393,7 +384,6 @@ def main(): ) except Exception: print("\n\n\tUnity Technologies\n") - options = parse_command_line() trainer_logger = logging.getLogger("mlagents.trainers") env_logger = logging.getLogger("mlagents.envs") @@ -401,7 +391,6 @@ def main(): if options.debug: trainer_logger.setLevel("DEBUG") env_logger.setLevel("DEBUG") - if options.env_path is None and options.num_runs > 1: raise TrainerError( "It is not possible to launch more than one concurrent training session " @@ -410,6 +399,8 @@ def main(): jobs = [] run_seed = options.seed + if options.cpu: + os.environ["CUDA_VISIBLE_DEVICES"] = "-1" if options.num_runs == 1: if options.seed == -1: diff --git a/ml-agents/mlagents/trainers/meta_curriculum.py b/ml-agents/mlagents/trainers/meta_curriculum.py index 6c93ddbd0a..0d5ad6b8b5 100644 --- a/ml-agents/mlagents/trainers/meta_curriculum.py +++ b/ml-agents/mlagents/trainers/meta_curriculum.py @@ -1,6 +1,7 @@ """Contains the MetaCurriculum class.""" import os +from typing import Any, Dict, Set from mlagents.trainers.curriculum 
import Curriculum from mlagents.trainers.exception import MetaCurriculumError @@ -14,7 +15,9 @@ class MetaCurriculum(object): particular brain in the environment. """ - def __init__(self, curriculum_folder, default_reset_parameters): + def __init__( + self, curriculum_folder: str, default_reset_parameters: Dict[str, Any] + ): """Initializes a MetaCurriculum object. Args: @@ -25,24 +28,23 @@ def __init__(self, curriculum_folder, default_reset_parameters): default_reset_parameters (dict): The default reset parameters of the environment. """ - used_reset_parameters = set() - self._brains_to_curriculums = {} + used_reset_parameters: Set[str] = set() + self._brains_to_curriculums: Dict[str, Curriculum] = {} try: for curriculum_filename in os.listdir(curriculum_folder): + # This process requires JSON files + if not curriculum_filename.lower().endswith(".json"): + continue brain_name = curriculum_filename.split(".")[0] curriculum_filepath = os.path.join( curriculum_folder, curriculum_filename ) curriculum = Curriculum(curriculum_filepath, default_reset_parameters) + config_keys: Set[str] = set(curriculum.get_config().keys()) # Check if any two curriculums use the same reset params. - if any( - [ - (parameter in curriculum.get_config().keys()) - for parameter in used_reset_parameters - ] - ): + if config_keys & used_reset_parameters: logger.warning( "Two or more curriculums will " "attempt to change the same reset " @@ -50,7 +52,7 @@ def __init__(self, curriculum_folder, default_reset_parameters): "non-deterministic." 
) - used_reset_parameters.update(curriculum.get_config().keys()) + used_reset_parameters.update(config_keys) self._brains_to_curriculums[brain_name] = curriculum except NotADirectoryError: raise MetaCurriculumError( diff --git a/ml-agents/mlagents/trainers/models.py b/ml-agents/mlagents/trainers/models.py index 6a6f72c270..3896fbb567 100644 --- a/ml-agents/mlagents/trainers/models.py +++ b/ml-agents/mlagents/trainers/models.py @@ -1,12 +1,13 @@ import logging from enum import Enum -from typing import Any, Callable, Dict, List +from typing import Callable, List import numpy as np import tensorflow as tf import tensorflow.contrib.layers as c_layers from mlagents.trainers.trainer import UnityTrainerException +from mlagents.envs.brain import CameraResolution logger = logging.getLogger("mlagents.trainers") @@ -126,21 +127,18 @@ def swish(input_activation: tf.Tensor) -> tf.Tensor: return tf.multiply(input_activation, tf.nn.sigmoid(input_activation)) @staticmethod - def create_visual_input(camera_parameters: Dict[str, Any], name: str) -> tf.Tensor: + def create_visual_input( + camera_parameters: CameraResolution, name: str + ) -> tf.Tensor: """ Creates image input op. :param camera_parameters: Parameters for visual observation from BrainInfo. :param name: Desired name of input op. :return: input op. 
""" - o_size_h = camera_parameters["height"] - o_size_w = camera_parameters["width"] - bw = camera_parameters["blackAndWhite"] - - if bw: - c_channels = 1 - else: - c_channels = 3 + o_size_h = camera_parameters.height + o_size_w = camera_parameters.width + c_channels = camera_parameters.num_channels visual_in = tf.placeholder( shape=[None, o_size_h, o_size_w, c_channels], dtype=tf.float32, name=name @@ -247,8 +245,8 @@ def create_vector_observation_encoder( ) return hidden + @staticmethod def create_visual_observation_encoder( - self, image_input: tf.Tensor, h_size: int, activation: ActivationFunction, @@ -288,13 +286,13 @@ def create_visual_observation_encoder( hidden = c_layers.flatten(conv2) with tf.variable_scope(scope + "/" + "flat_encoding"): - hidden_flat = self.create_vector_observation_encoder( + hidden_flat = LearningModel.create_vector_observation_encoder( hidden, h_size, activation, num_layers, scope, reuse ) return hidden_flat + @staticmethod def create_nature_cnn_visual_observation_encoder( - self, image_input: tf.Tensor, h_size: int, activation: ActivationFunction, @@ -343,13 +341,13 @@ def create_nature_cnn_visual_observation_encoder( hidden = c_layers.flatten(conv3) with tf.variable_scope(scope + "/" + "flat_encoding"): - hidden_flat = self.create_vector_observation_encoder( + hidden_flat = LearningModel.create_vector_observation_encoder( hidden, h_size, activation, num_layers, scope, reuse ) return hidden_flat + @staticmethod def create_resnet_visual_observation_encoder( - self, image_input: tf.Tensor, h_size: int, activation: ActivationFunction, @@ -411,7 +409,7 @@ def create_resnet_visual_observation_encoder( hidden = c_layers.flatten(hidden) with tf.variable_scope(scope + "/" + "flat_encoding"): - hidden_flat = self.create_vector_observation_encoder( + hidden_flat = LearningModel.create_vector_observation_encoder( hidden, h_size, activation, num_layers, scope, reuse ) return hidden_flat @@ -470,7 +468,7 @@ def create_observation_streams( 
num_layers: int, vis_encode_type: EncoderType = EncoderType.SIMPLE, stream_scopes: List[str] = None, - ) -> tf.Tensor: + ) -> List[tf.Tensor]: """ Creates encoding stream for observations. :param num_streams: Number of streams to create. @@ -491,45 +489,31 @@ def create_observation_streams( self.visual_in.append(visual_input) vector_observation_input = self.create_vector_input() + # Pick the encoder function based on the EncoderType + create_encoder_func = LearningModel.create_visual_observation_encoder + if vis_encode_type == EncoderType.RESNET: + create_encoder_func = LearningModel.create_resnet_visual_observation_encoder + elif vis_encode_type == EncoderType.NATURE_CNN: + create_encoder_func = ( + LearningModel.create_nature_cnn_visual_observation_encoder + ) + final_hiddens = [] for i in range(num_streams): visual_encoders = [] hidden_state, hidden_visual = None, None _scope_add = stream_scopes[i] if stream_scopes else "" if self.vis_obs_size > 0: - if vis_encode_type == EncoderType.RESNET: - for j in range(brain.number_visual_observations): - encoded_visual = self.create_resnet_visual_observation_encoder( - self.visual_in[j], - h_size, - activation_fn, - num_layers, - _scope_add + "main_graph_{}_encoder{}".format(i, j), - False, - ) - visual_encoders.append(encoded_visual) - elif vis_encode_type == EncoderType.NATURE_CNN: - for j in range(brain.number_visual_observations): - encoded_visual = self.create_nature_cnn_visual_observation_encoder( - self.visual_in[j], - h_size, - activation_fn, - num_layers, - _scope_add + "main_graph_{}_encoder{}".format(i, j), - False, - ) - visual_encoders.append(encoded_visual) - else: - for j in range(brain.number_visual_observations): - encoded_visual = self.create_visual_observation_encoder( - self.visual_in[j], - h_size, - activation_fn, - num_layers, - _scope_add + "main_graph_{}_encoder{}".format(i, j), - False, - ) - visual_encoders.append(encoded_visual) + for j in range(brain.number_visual_observations): + 
encoded_visual = create_encoder_func( + self.visual_in[j], + h_size, + activation_fn, + num_layers, + scope=f"{_scope_add}main_graph_{i}_encoder{j}", + reuse=False, + ) + visual_encoders.append(encoded_visual) hidden_visual = tf.concat(visual_encoders, axis=1) if brain.vector_observation_space_size > 0: hidden_state = self.create_vector_observation_encoder( @@ -537,8 +521,8 @@ def create_observation_streams( h_size, activation_fn, num_layers, - _scope_add + "main_graph_{}".format(i), - False, + scope=f"{_scope_add}main_graph_{i}", + reuse=False, ) if hidden_state is not None and hidden_visual is not None: final_hidden = tf.concat([hidden_visual, hidden_state], axis=1) diff --git a/ml-agents/mlagents/trainers/ppo/trainer.py b/ml-agents/mlagents/trainers/ppo/trainer.py index 73c4335c4c..19d6a17eff 100644 --- a/ml-agents/mlagents/trainers/ppo/trainer.py +++ b/ml-agents/mlagents/trainers/ppo/trainer.py @@ -269,7 +269,7 @@ def update_policy(self): update_stats = self.policy.bc_module.update() for stat, val in update_stats.items(): self.stats[stat].append(val) - self.training_buffer.reset_update_buffer() + self.clear_update_buffer() self.trainer_metrics.end_policy_update() diff --git a/ml-agents/mlagents/trainers/rl_trainer.py b/ml-agents/mlagents/trainers/rl_trainer.py index 2ba79ffb08..ce3bb9e377 100644 --- a/ml-agents/mlagents/trainers/rl_trainer.py +++ b/ml-agents/mlagents/trainers/rl_trainer.py @@ -244,6 +244,13 @@ def end_episode(self) -> None: for agent_id in rewards: rewards[agent_id] = 0 + def clear_update_buffer(self) -> None: + """ + Clear the buffers that have been built up during inference. If + we're not training, this should be called instead of update_policy. 
+ """ + self.training_buffer.reset_update_buffer() + def add_policy_outputs( self, take_action_outputs: ActionInfoOutputs, agent_id: str, agent_idx: int ) -> None: diff --git a/ml-agents/mlagents/trainers/sac/policy.py b/ml-agents/mlagents/trainers/sac/policy.py index 3245eafd6a..b8b996a427 100644 --- a/ml-agents/mlagents/trainers/sac/policy.py +++ b/ml-agents/mlagents/trainers/sac/policy.py @@ -113,7 +113,7 @@ def create_model( seed=seed, stream_names=list(reward_signal_configs.keys()), tau=float(trainer_params["tau"]), - gammas=list(_val["gamma"] for _val in reward_signal_configs.values()), + gammas=[_val["gamma"] for _val in reward_signal_configs.values()], vis_encode_type=EncoderType( trainer_params.get("vis_encode_type", "simple") ), diff --git a/ml-agents/mlagents/trainers/sac/trainer.py b/ml-agents/mlagents/trainers/sac/trainer.py index 3fcb929850..c82a47e521 100644 --- a/ml-agents/mlagents/trainers/sac/trainer.py +++ b/ml-agents/mlagents/trainers/sac/trainer.py @@ -329,10 +329,10 @@ def update_reward_signals(self) -> None: self.trainer_parameters["batch_size"], sequence_length=self.policy.sequence_length, ) - update_stats = self.policy.update_reward_signals( - reward_signal_minibatches, n_sequences - ) - for stat_name, value in update_stats.items(): - batch_update_stats[stat_name].append(value) + update_stats = self.policy.update_reward_signals( + reward_signal_minibatches, n_sequences + ) + for stat_name, value in update_stats.items(): + batch_update_stats[stat_name].append(value) for stat, stat_list in batch_update_stats.items(): self.stats[stat].append(np.mean(stat_list)) diff --git a/ml-agents/mlagents/trainers/tensorflow_to_barracuda.py b/ml-agents/mlagents/trainers/tensorflow_to_barracuda.py index 3e7ec4b0fe..521252c0b0 100644 --- a/ml-agents/mlagents/trainers/tensorflow_to_barracuda.py +++ b/ml-agents/mlagents/trainers/tensorflow_to_barracuda.py @@ -178,6 +178,7 @@ "OneHot": Struct(id=67, rank=lambda inputs: inputs[0] + 1), # Broadcast ops "Add": 
Struct(id=100, rank=lambda inputs: np.max(inputs)), + "AddV2": Struct(id=100, rank=lambda inputs: np.max(inputs)), "Sub": Struct(id=101, rank=lambda inputs: np.max(inputs)), "Mul": Struct(id=102, rank=lambda inputs: np.max(inputs)), "RealDiv": Struct(id=103, rank=lambda inputs: np.max(inputs)), diff --git a/ml-agents/mlagents/trainers/tests/mock_brain.py b/ml-agents/mlagents/trainers/tests/mock_brain.py index 5a1fa12aeb..aaf03e8f04 100644 --- a/ml-agents/mlagents/trainers/tests/mock_brain.py +++ b/ml-agents/mlagents/trainers/tests/mock_brain.py @@ -1,6 +1,7 @@ import unittest.mock as mock import numpy as np +from mlagents.envs.brain import CameraResolution, BrainParameters from mlagents.trainers.buffer import Buffer @@ -26,7 +27,7 @@ def create_mock_brainparams( mock_brain.return_value.vector_observation_space_size = ( vector_observation_space_size ) - camrez = {"blackAndWhite": False, "height": 84, "width": 84} + camrez = CameraResolution(height=84, width=84, num_channels=3) mock_brain.return_value.camera_resolutions = [camrez] * number_visual_observations mock_brain.return_value.vector_action_space_size = vector_action_space_size mock_brain.return_value.brain_name = "MockBrain" @@ -94,16 +95,20 @@ def setup_mock_unityenvironment(mock_env, mock_brain, mock_braininfo): mock_env.return_value.academy_name = "MockAcademy" mock_env.return_value.brains = {brain_name: mock_brain} mock_env.return_value.external_brain_names = [brain_name] - mock_env.return_value.brain_names = [brain_name] mock_env.return_value.reset.return_value = {brain_name: mock_braininfo} mock_env.return_value.step.return_value = {brain_name: mock_braininfo} -def simulate_rollout(env, policy, buffer_init_samples): +def simulate_rollout(env, policy, buffer_init_samples, exclude_key_list=None): brain_info_list = [] for i in range(buffer_init_samples): - brain_info_list.append(env.step()[env.brain_names[0]]) + brain_info_list.append(env.step()[env.external_brain_names[0]]) buffer = 
create_buffer(brain_info_list, policy.brain, policy.sequence_length) + # If a key_list was given, remove those keys + if exclude_key_list: + for key in exclude_key_list: + if key in buffer.update_buffer: + buffer.update_buffer.pop(key) return buffer @@ -230,3 +235,26 @@ def create_mock_banana_brain(): vector_observation_space_size=0, ) return mock_brain + + +def make_brain_parameters( + discrete_action: bool = False, + visual_inputs: int = 0, + stack: bool = True, + brain_name: str = "RealFakeBrain", + vec_obs_size: int = 3, +) -> BrainParameters: + resolutions = [ + CameraResolution(width=30, height=40, num_channels=3) + for _ in range(visual_inputs) + ] + + return BrainParameters( + vector_observation_space_size=vec_obs_size, + num_stacked_vector_observations=2 if stack else 1, + camera_resolutions=resolutions, + vector_action_space_size=[2], + vector_action_descriptions=["", ""], + vector_action_space_type=int(not discrete_action), + brain_name=brain_name, + ) diff --git a/ml-agents/mlagents/trainers/tests/test_bc.py b/ml-agents/mlagents/trainers/tests/test_bc.py index 4a44ea4983..12acf5c5b9 100644 --- a/ml-agents/mlagents/trainers/tests/test_bc.py +++ b/ml-agents/mlagents/trainers/tests/test_bc.py @@ -12,6 +12,7 @@ from mlagents.trainers.bc.offline_trainer import BCTrainer from mlagents.envs.environment import UnityEnvironment from mlagents.envs.mock_communicator import MockCommunicator +from mlagents.trainers.tests.mock_brain import make_brain_parameters @pytest.fixture @@ -114,30 +115,28 @@ def test_bc_policy_evaluate(mock_communicator, mock_launcher, dummy_config): ) env = UnityEnvironment(" ") brain_infos = env.reset() - brain_info = brain_infos[env.brain_names[0]] + brain_info = brain_infos[env.external_brain_names[0]] trainer_parameters = dummy_config - model_path = env.brain_names[0] + model_path = env.external_brain_names[0] trainer_parameters["model_path"] = model_path trainer_parameters["keep_checkpoints"] = 3 - policy = BCPolicy(0, 
env.brains[env.brain_names[0]], trainer_parameters, False) + policy = BCPolicy( + 0, env.brains[env.external_brain_names[0]], trainer_parameters, False + ) run_out = policy.evaluate(brain_info) assert run_out["action"].shape == (3, 2) env.close() -@mock.patch("mlagents.envs.environment.UnityEnvironment.executable_launcher") -@mock.patch("mlagents.envs.environment.UnityEnvironment.get_communicator") -def test_cc_bc_model(mock_communicator, mock_launcher): +def test_cc_bc_model(): tf.reset_default_graph() with tf.Session() as sess: with tf.variable_scope("FakeGraphScope"): - mock_communicator.return_value = MockCommunicator( - discrete_action=False, visual_inputs=0 + model = BehavioralCloningModel( + make_brain_parameters(discrete_action=False, visual_inputs=0) ) - env = UnityEnvironment(" ") - model = BehavioralCloningModel(env.brains["RealFakeBrain"]) init = tf.global_variables_initializer() sess.run(init) @@ -148,20 +147,16 @@ def test_cc_bc_model(mock_communicator, mock_launcher): model.vector_in: np.array([[1, 2, 3, 1, 2, 3], [3, 4, 5, 3, 4, 5]]), } sess.run(run_list, feed_dict=feed_dict) - env.close() + # env.close() -@mock.patch("mlagents.envs.environment.UnityEnvironment.executable_launcher") -@mock.patch("mlagents.envs.environment.UnityEnvironment.get_communicator") -def test_dc_bc_model(mock_communicator, mock_launcher): +def test_dc_bc_model(): tf.reset_default_graph() with tf.Session() as sess: with tf.variable_scope("FakeGraphScope"): - mock_communicator.return_value = MockCommunicator( - discrete_action=True, visual_inputs=0 + model = BehavioralCloningModel( + make_brain_parameters(discrete_action=True, visual_inputs=0) ) - env = UnityEnvironment(" ") - model = BehavioralCloningModel(env.brains["RealFakeBrain"]) init = tf.global_variables_initializer() sess.run(init) @@ -174,20 +169,15 @@ def test_dc_bc_model(mock_communicator, mock_launcher): model.action_masks: np.ones([2, 2]), } sess.run(run_list, feed_dict=feed_dict) - env.close() 
-@mock.patch("mlagents.envs.environment.UnityEnvironment.executable_launcher") -@mock.patch("mlagents.envs.environment.UnityEnvironment.get_communicator") -def test_visual_dc_bc_model(mock_communicator, mock_launcher): +def test_visual_dc_bc_model(): tf.reset_default_graph() with tf.Session() as sess: with tf.variable_scope("FakeGraphScope"): - mock_communicator.return_value = MockCommunicator( - discrete_action=True, visual_inputs=2 + model = BehavioralCloningModel( + make_brain_parameters(discrete_action=True, visual_inputs=2) ) - env = UnityEnvironment(" ") - model = BehavioralCloningModel(env.brains["RealFakeBrain"]) init = tf.global_variables_initializer() sess.run(init) @@ -202,20 +192,15 @@ def test_visual_dc_bc_model(mock_communicator, mock_launcher): model.action_masks: np.ones([2, 2]), } sess.run(run_list, feed_dict=feed_dict) - env.close() -@mock.patch("mlagents.envs.environment.UnityEnvironment.executable_launcher") -@mock.patch("mlagents.envs.environment.UnityEnvironment.get_communicator") -def test_visual_cc_bc_model(mock_communicator, mock_launcher): +def test_visual_cc_bc_model(): tf.reset_default_graph() with tf.Session() as sess: with tf.variable_scope("FakeGraphScope"): - mock_communicator.return_value = MockCommunicator( - discrete_action=False, visual_inputs=2 + model = BehavioralCloningModel( + make_brain_parameters(discrete_action=False, visual_inputs=2) ) - env = UnityEnvironment(" ") - model = BehavioralCloningModel(env.brains["RealFakeBrain"]) init = tf.global_variables_initializer() sess.run(init) @@ -228,7 +213,6 @@ def test_visual_cc_bc_model(mock_communicator, mock_launcher): model.visual_in[1]: np.ones([2, 40, 30, 3]), } sess.run(run_list, feed_dict=feed_dict) - env.close() if __name__ == "__main__": diff --git a/ml-agents/mlagents/trainers/tests/test_bcmodule.py b/ml-agents/mlagents/trainers/tests/test_bcmodule.py index b20a8eda9b..24f7b7efb7 100644 --- a/ml-agents/mlagents/trainers/tests/test_bcmodule.py +++ 
b/ml-agents/mlagents/trainers/tests/test_bcmodule.py @@ -83,7 +83,7 @@ def create_policy_with_bc_mock( mb.setup_mock_unityenvironment(mock_env, mock_brain, mock_braininfo) env = mock_env() - model_path = env.brain_names[0] + model_path = env.external_brain_names[0] trainer_config["model_path"] = model_path trainer_config["keep_checkpoints"] = 3 trainer_config["use_recurrent"] = use_rnn diff --git a/ml-agents/mlagents/trainers/tests/test_learn.py b/ml-agents/mlagents/trainers/tests/test_learn.py index 631336a5af..2bdb19c22c 100644 --- a/ml-agents/mlagents/trainers/tests/test_learn.py +++ b/ml-agents/mlagents/trainers/tests/test_learn.py @@ -14,12 +14,17 @@ def basic_options(extra_args=None): return parse_command_line(args) +@patch("mlagents.trainers.learn.TrainerFactory") @patch("mlagents.trainers.learn.SamplerManager") @patch("mlagents.trainers.learn.SubprocessEnvManager") @patch("mlagents.trainers.learn.create_environment_factory") @patch("mlagents.trainers.learn.load_config") def test_run_training( - load_config, create_environment_factory, subproc_env_mock, sampler_manager_mock + load_config, + create_environment_factory, + subproc_env_mock, + sampler_manager_mock, + trainer_factory_mock, ): mock_env = MagicMock() mock_env.external_brain_names = [] @@ -33,7 +38,7 @@ def test_run_training( with patch.object(TrainerController, "start_learning", MagicMock()): learn.run_training(0, 0, basic_options(), MagicMock()) mock_init.assert_called_once_with( - {}, + trainer_factory_mock.return_value, "./models/ppo-0", "./summaries", "ppo-0", diff --git a/ml-agents/mlagents/trainers/tests/test_ppo.py b/ml-agents/mlagents/trainers/tests/test_ppo.py index 7321f650ed..441ed6a3a8 100644 --- a/ml-agents/mlagents/trainers/tests/test_ppo.py +++ b/ml-agents/mlagents/trainers/tests/test_ppo.py @@ -14,6 +14,7 @@ from mlagents.envs.environment import UnityEnvironment from mlagents.envs.mock_communicator import MockCommunicator from mlagents.trainers.tests import mock_brain as mb +from 
mlagents.trainers.tests.mock_brain import make_brain_parameters @pytest.fixture @@ -65,14 +66,14 @@ def test_ppo_policy_evaluate(mock_communicator, mock_launcher, dummy_config): ) env = UnityEnvironment(" ") brain_infos = env.reset() - brain_info = brain_infos[env.brain_names[0]] + brain_info = brain_infos[env.external_brain_names[0]] trainer_parameters = dummy_config - model_path = env.brain_names[0] + model_path = env.external_brain_names[0] trainer_parameters["model_path"] = model_path trainer_parameters["keep_checkpoints"] = 3 policy = PPOPolicy( - 0, env.brains[env.brain_names[0]], trainer_parameters, False, False + 0, env.brains[env.external_brain_names[0]], trainer_parameters, False, False ) run_out = policy.evaluate(brain_info) assert run_out["action"].shape == (3, 2) @@ -88,14 +89,14 @@ def test_ppo_get_value_estimates(mock_communicator, mock_launcher, dummy_config) ) env = UnityEnvironment(" ") brain_infos = env.reset() - brain_info = brain_infos[env.brain_names[0]] + brain_info = brain_infos[env.external_brain_names[0]] trainer_parameters = dummy_config - model_path = env.brain_names[0] + model_path = env.external_brain_names[0] trainer_parameters["model_path"] = model_path trainer_parameters["keep_checkpoints"] = 3 policy = PPOPolicy( - 0, env.brains[env.brain_names[0]], trainer_parameters, False, False + 0, env.brains[env.external_brain_names[0]], trainer_parameters, False, False ) run_out = policy.get_value_estimates(brain_info, 0, done=False) for key, val in run_out.items(): @@ -117,18 +118,13 @@ def test_ppo_get_value_estimates(mock_communicator, mock_launcher, dummy_config) env.close() -@mock.patch("mlagents.envs.environment.UnityEnvironment.executable_launcher") -@mock.patch("mlagents.envs.environment.UnityEnvironment.get_communicator") -def test_ppo_model_cc_vector(mock_communicator, mock_launcher): +def test_ppo_model_cc_vector(): tf.reset_default_graph() with tf.Session() as sess: with tf.variable_scope("FakeGraphScope"): - 
mock_communicator.return_value = MockCommunicator( - discrete_action=False, visual_inputs=0 + model = PPOModel( + make_brain_parameters(discrete_action=False, visual_inputs=0) ) - env = UnityEnvironment(" ") - - model = PPOModel(env.brains["RealFakeBrain"]) init = tf.global_variables_initializer() sess.run(init) @@ -146,21 +142,15 @@ def test_ppo_model_cc_vector(mock_communicator, mock_launcher): model.epsilon: np.array([[0, 1], [2, 3]]), } sess.run(run_list, feed_dict=feed_dict) - env.close() -@mock.patch("mlagents.envs.environment.UnityEnvironment.executable_launcher") -@mock.patch("mlagents.envs.environment.UnityEnvironment.get_communicator") -def test_ppo_model_cc_visual(mock_communicator, mock_launcher): +def test_ppo_model_cc_visual(): tf.reset_default_graph() with tf.Session() as sess: with tf.variable_scope("FakeGraphScope"): - mock_communicator.return_value = MockCommunicator( - discrete_action=False, visual_inputs=2 + model = PPOModel( + make_brain_parameters(discrete_action=False, visual_inputs=2) ) - env = UnityEnvironment(" ") - - model = PPOModel(env.brains["RealFakeBrain"]) init = tf.global_variables_initializer() sess.run(init) @@ -180,20 +170,15 @@ def test_ppo_model_cc_visual(mock_communicator, mock_launcher): model.epsilon: np.array([[0, 1], [2, 3]]), } sess.run(run_list, feed_dict=feed_dict) - env.close() -@mock.patch("mlagents.envs.environment.UnityEnvironment.executable_launcher") -@mock.patch("mlagents.envs.environment.UnityEnvironment.get_communicator") -def test_ppo_model_dc_visual(mock_communicator, mock_launcher): +def test_ppo_model_dc_visual(): tf.reset_default_graph() with tf.Session() as sess: with tf.variable_scope("FakeGraphScope"): - mock_communicator.return_value = MockCommunicator( - discrete_action=True, visual_inputs=2 + model = PPOModel( + make_brain_parameters(discrete_action=True, visual_inputs=2) ) - env = UnityEnvironment(" ") - model = PPOModel(env.brains["RealFakeBrain"]) init = tf.global_variables_initializer() 
sess.run(init) @@ -213,20 +198,15 @@ def test_ppo_model_dc_visual(mock_communicator, mock_launcher): model.action_masks: np.ones([2, 2]), } sess.run(run_list, feed_dict=feed_dict) - env.close() -@mock.patch("mlagents.envs.environment.UnityEnvironment.executable_launcher") -@mock.patch("mlagents.envs.environment.UnityEnvironment.get_communicator") -def test_ppo_model_dc_vector(mock_communicator, mock_launcher): +def test_ppo_model_dc_vector(): tf.reset_default_graph() with tf.Session() as sess: with tf.variable_scope("FakeGraphScope"): - mock_communicator.return_value = MockCommunicator( - discrete_action=True, visual_inputs=0 + model = PPOModel( + make_brain_parameters(discrete_action=True, visual_inputs=0) ) - env = UnityEnvironment(" ") - model = PPOModel(env.brains["RealFakeBrain"]) init = tf.global_variables_initializer() sess.run(init) @@ -244,22 +224,17 @@ def test_ppo_model_dc_vector(mock_communicator, mock_launcher): model.action_masks: np.ones([2, 2]), } sess.run(run_list, feed_dict=feed_dict) - env.close() -@mock.patch("mlagents.envs.environment.UnityEnvironment.executable_launcher") -@mock.patch("mlagents.envs.environment.UnityEnvironment.get_communicator") -def test_ppo_model_dc_vector_rnn(mock_communicator, mock_launcher): +def test_ppo_model_dc_vector_rnn(): tf.reset_default_graph() with tf.Session() as sess: with tf.variable_scope("FakeGraphScope"): - mock_communicator.return_value = MockCommunicator( - discrete_action=True, visual_inputs=0 - ) - env = UnityEnvironment(" ") memory_size = 128 model = PPOModel( - env.brains["RealFakeBrain"], use_recurrent=True, m_size=memory_size + make_brain_parameters(discrete_action=True, visual_inputs=0), + use_recurrent=True, + m_size=memory_size, ) init = tf.global_variables_initializer() sess.run(init) @@ -281,22 +256,17 @@ def test_ppo_model_dc_vector_rnn(mock_communicator, mock_launcher): model.action_masks: np.ones([1, 2]), } sess.run(run_list, feed_dict=feed_dict) - env.close() 
-@mock.patch("mlagents.envs.environment.UnityEnvironment.executable_launcher") -@mock.patch("mlagents.envs.environment.UnityEnvironment.get_communicator") -def test_ppo_model_cc_vector_rnn(mock_communicator, mock_launcher): +def test_ppo_model_cc_vector_rnn(): tf.reset_default_graph() with tf.Session() as sess: with tf.variable_scope("FakeGraphScope"): - mock_communicator.return_value = MockCommunicator( - discrete_action=False, visual_inputs=0 - ) - env = UnityEnvironment(" ") memory_size = 128 model = PPOModel( - env.brains["RealFakeBrain"], use_recurrent=True, m_size=memory_size + make_brain_parameters(discrete_action=False, visual_inputs=0), + use_recurrent=True, + m_size=memory_size, ) init = tf.global_variables_initializer() sess.run(init) @@ -317,7 +287,6 @@ def test_ppo_model_cc_vector_rnn(mock_communicator, mock_launcher): model.epsilon: np.array([[0, 1]]), } sess.run(run_list, feed_dict=feed_dict) - env.close() def test_rl_functions(): @@ -358,6 +327,12 @@ def test_trainer_update_policy(mock_env, dummy_config, use_discrete): trainer_params = dummy_config trainer_params["use_recurrent"] = True + # Test curiosity reward signal + trainer_params["reward_signals"]["curiosity"] = {} + trainer_params["reward_signals"]["curiosity"]["strength"] = 1.0 + trainer_params["reward_signals"]["curiosity"]["gamma"] = 0.99 + trainer_params["reward_signals"]["curiosity"]["encoding_size"] = 128 + trainer = PPOTrainer(mock_brain, 0, trainer_params, True, False, 0, "0", False) # Test update with sequence length smaller than batch size buffer = mb.simulate_rollout(env, trainer.policy, BUFFER_INIT_SAMPLES) @@ -365,6 +340,10 @@ def test_trainer_update_policy(mock_env, dummy_config, use_discrete): buffer.update_buffer["extrinsic_rewards"] = buffer.update_buffer["rewards"] buffer.update_buffer["extrinsic_returns"] = buffer.update_buffer["rewards"] buffer.update_buffer["extrinsic_value_estimates"] = buffer.update_buffer["rewards"] + buffer.update_buffer["curiosity_rewards"] = 
buffer.update_buffer["rewards"] + buffer.update_buffer["curiosity_returns"] = buffer.update_buffer["rewards"] + buffer.update_buffer["curiosity_value_estimates"] = buffer.update_buffer["rewards"] + trainer.training_buffer = buffer trainer.update_policy() # Make batch length a larger multiple of sequence length diff --git a/ml-agents/mlagents/trainers/tests/test_reward_signals.py b/ml-agents/mlagents/trainers/tests/test_reward_signals.py index 43cce5c3b8..b1d8e9f5e4 100644 --- a/ml-agents/mlagents/trainers/tests/test_reward_signals.py +++ b/ml-agents/mlagents/trainers/tests/test_reward_signals.py @@ -108,7 +108,7 @@ def create_policy_mock( ) trainer_parameters = trainer_config - model_path = env.brain_names[0] + model_path = env.external_brain_names[0] trainer_parameters["model_path"] = model_path trainer_parameters["keep_checkpoints"] = 3 trainer_parameters["reward_signals"].update(reward_signal_config) @@ -122,8 +122,8 @@ def create_policy_mock( def reward_signal_eval(env, policy, reward_signal_name): brain_infos = env.reset() - brain_info = brain_infos[env.brain_names[0]] - next_brain_info = env.step()[env.brain_names[0]] + brain_info = brain_infos[env.external_brain_names[0]] + next_brain_info = env.step()[env.external_brain_names[0]] # Test evaluate rsig_result = policy.reward_signals[reward_signal_name].evaluate( brain_info, next_brain_info diff --git a/ml-agents/mlagents/trainers/tests/test_rl_trainer.py b/ml-agents/mlagents/trainers/tests/test_rl_trainer.py index b1b43c0f2f..36da0ae769 100644 --- a/ml-agents/mlagents/trainers/tests/test_rl_trainer.py +++ b/ml-agents/mlagents/trainers/tests/test_rl_trainer.py @@ -4,6 +4,7 @@ import mlagents.trainers.tests.mock_brain as mb import numpy as np from mlagents.trainers.rl_trainer import RLTrainer +from mlagents.trainers.tests.test_buffer import construct_fake_buffer @pytest.fixture @@ -92,3 +93,12 @@ def test_rl_trainer(add_policy_outputs, add_rewards_outputs, num_vis_obs): for rewards in 
trainer.collected_rewards.values(): for agent_id in rewards: assert rewards[agent_id] == 0 + + +def test_clear_update_buffer(): + trainer = create_rl_trainer() + trainer.training_buffer = construct_fake_buffer() + trainer.training_buffer.append_update_buffer(2, batch_size=None, training_length=2) + trainer.clear_update_buffer() + for _, arr in trainer.training_buffer.update_buffer.items(): + assert len(arr) == 0 diff --git a/ml-agents/mlagents/trainers/tests/test_sac.py b/ml-agents/mlagents/trainers/tests/test_sac.py index bdaa1cf10d..b3ad9a8934 100644 --- a/ml-agents/mlagents/trainers/tests/test_sac.py +++ b/ml-agents/mlagents/trainers/tests/test_sac.py @@ -8,9 +8,8 @@ from mlagents.trainers.sac.models import SACModel from mlagents.trainers.sac.policy import SACPolicy from mlagents.trainers.sac.trainer import SACTrainer -from mlagents.envs.environment import UnityEnvironment -from mlagents.envs.mock_communicator import MockCommunicator from mlagents.trainers.tests import mock_brain as mb +from mlagents.trainers.tests.mock_brain import make_brain_parameters @pytest.fixture @@ -65,7 +64,7 @@ def create_sac_policy_mock(mock_env, dummy_config, use_rnn, use_discrete, use_vi ) trainer_parameters = dummy_config - model_path = env.brain_names[0] + model_path = env.external_brain_names[0] trainer_parameters["model_path"] = model_path trainer_parameters["keep_checkpoints"] = 3 trainer_parameters["use_recurrent"] = use_rnn @@ -81,7 +80,7 @@ def test_sac_cc_policy(mock_env, dummy_config): mock_env, dummy_config, use_rnn=False, use_discrete=False, use_visual=False ) brain_infos = env.reset() - brain_info = brain_infos[env.brain_names[0]] + brain_info = brain_infos[env.external_brain_names[0]] run_out = policy.evaluate(brain_info) assert run_out["action"].shape == (NUM_AGENTS, VECTOR_ACTION_SPACE[0]) @@ -95,8 +94,9 @@ def test_sac_cc_policy(mock_env, dummy_config): env.close() +@pytest.mark.parametrize("discrete", [True, False], ids=["discrete", "continuous"]) 
@mock.patch("mlagents.envs.environment.UnityEnvironment") -def test_sac_update_reward_signals(mock_env, dummy_config): +def test_sac_update_reward_signals(mock_env, dummy_config, discrete): # Test evaluate tf.reset_default_graph() # Add a Curiosity module @@ -105,11 +105,17 @@ def test_sac_update_reward_signals(mock_env, dummy_config): dummy_config["reward_signals"]["curiosity"]["gamma"] = 0.99 dummy_config["reward_signals"]["curiosity"]["encoding_size"] = 128 env, policy = create_sac_policy_mock( - mock_env, dummy_config, use_rnn=False, use_discrete=False, use_visual=False + mock_env, dummy_config, use_rnn=False, use_discrete=discrete, use_visual=False + ) + + # Test update, while removing PPO-specific buffer elements. + buffer = mb.simulate_rollout( + env, + policy, + BUFFER_INIT_SAMPLES, + exclude_key_list=["advantages", "actions_pre", "random_normal_epsilon"], ) - # Test update - buffer = mb.simulate_rollout(env, policy, BUFFER_INIT_SAMPLES) # Mock out reward signal eval buffer.update_buffer["extrinsic_rewards"] = buffer.update_buffer["rewards"] buffer.update_buffer["curiosity_rewards"] = buffer.update_buffer["rewards"] @@ -128,7 +134,7 @@ def test_sac_dc_policy(mock_env, dummy_config): mock_env, dummy_config, use_rnn=False, use_discrete=True, use_visual=False ) brain_infos = env.reset() - brain_info = brain_infos[env.brain_names[0]] + brain_info = brain_infos[env.external_brain_names[0]] run_out = policy.evaluate(brain_info) assert run_out["action"].shape == (NUM_AGENTS, len(DISCRETE_ACTION_SPACE)) @@ -150,7 +156,7 @@ def test_sac_visual_policy(mock_env, dummy_config): mock_env, dummy_config, use_rnn=False, use_discrete=True, use_visual=True ) brain_infos = env.reset() - brain_info = brain_infos[env.brain_names[0]] + brain_info = brain_infos[env.external_brain_names[0]] run_out = policy.evaluate(brain_info) assert run_out["action"].shape == (NUM_AGENTS, len(DISCRETE_ACTION_SPACE)) @@ -172,7 +178,7 @@ def test_sac_rnn_policy(mock_env, dummy_config): mock_env, 
dummy_config, use_rnn=True, use_discrete=True, use_visual=False ) brain_infos = env.reset() - brain_info = brain_infos[env.brain_names[0]] + brain_info = brain_infos[env.external_brain_names[0]] run_out = policy.evaluate(brain_info) assert run_out["action"].shape == (NUM_AGENTS, len(DISCRETE_ACTION_SPACE)) @@ -184,18 +190,13 @@ def test_sac_rnn_policy(mock_env, dummy_config): env.close() -@mock.patch("mlagents.envs.environment.UnityEnvironment.executable_launcher") -@mock.patch("mlagents.envs.environment.UnityEnvironment.get_communicator") -def test_sac_model_cc_vector(mock_communicator, mock_launcher): +def test_sac_model_cc_vector(): tf.reset_default_graph() with tf.Session() as sess: with tf.variable_scope("FakeGraphScope"): - mock_communicator.return_value = MockCommunicator( - discrete_action=False, visual_inputs=0 + model = SACModel( + make_brain_parameters(discrete_action=False, visual_inputs=0) ) - env = UnityEnvironment(" ") - - model = SACModel(env.brains["RealFakeBrain"]) init = tf.global_variables_initializer() sess.run(init) @@ -206,21 +207,15 @@ def test_sac_model_cc_vector(mock_communicator, mock_launcher): model.vector_in: np.array([[1, 2, 3, 1, 2, 3], [3, 4, 5, 3, 4, 5]]), } sess.run(run_list, feed_dict=feed_dict) - env.close() -@mock.patch("mlagents.envs.environment.UnityEnvironment.executable_launcher") -@mock.patch("mlagents.envs.environment.UnityEnvironment.get_communicator") -def test_sac_model_cc_visual(mock_communicator, mock_launcher): +def test_sac_model_cc_visual(): tf.reset_default_graph() with tf.Session() as sess: with tf.variable_scope("FakeGraphScope"): - mock_communicator.return_value = MockCommunicator( - discrete_action=False, visual_inputs=2 + model = SACModel( + make_brain_parameters(discrete_action=False, visual_inputs=2) ) - env = UnityEnvironment(" ") - - model = SACModel(env.brains["RealFakeBrain"]) init = tf.global_variables_initializer() sess.run(init) @@ -233,20 +228,15 @@ def test_sac_model_cc_visual(mock_communicator, 
mock_launcher): model.visual_in[1]: np.ones([2, 40, 30, 3]), } sess.run(run_list, feed_dict=feed_dict) - env.close() -@mock.patch("mlagents.envs.environment.UnityEnvironment.executable_launcher") -@mock.patch("mlagents.envs.environment.UnityEnvironment.get_communicator") -def test_sac_model_dc_visual(mock_communicator, mock_launcher): +def test_sac_model_dc_visual(): tf.reset_default_graph() with tf.Session() as sess: with tf.variable_scope("FakeGraphScope"): - mock_communicator.return_value = MockCommunicator( - discrete_action=True, visual_inputs=2 + model = SACModel( + make_brain_parameters(discrete_action=True, visual_inputs=2) ) - env = UnityEnvironment(" ") - model = SACModel(env.brains["RealFakeBrain"]) init = tf.global_variables_initializer() sess.run(init) @@ -260,20 +250,15 @@ def test_sac_model_dc_visual(mock_communicator, mock_launcher): model.action_masks: np.ones([2, 2]), } sess.run(run_list, feed_dict=feed_dict) - env.close() -@mock.patch("mlagents.envs.environment.UnityEnvironment.executable_launcher") -@mock.patch("mlagents.envs.environment.UnityEnvironment.get_communicator") -def test_sac_model_dc_vector(mock_communicator, mock_launcher): +def test_sac_model_dc_vector(): tf.reset_default_graph() with tf.Session() as sess: with tf.variable_scope("FakeGraphScope"): - mock_communicator.return_value = MockCommunicator( - discrete_action=True, visual_inputs=0 + model = SACModel( + make_brain_parameters(discrete_action=True, visual_inputs=0) ) - env = UnityEnvironment(" ") - model = SACModel(env.brains["RealFakeBrain"]) init = tf.global_variables_initializer() sess.run(init) @@ -285,22 +270,17 @@ def test_sac_model_dc_vector(mock_communicator, mock_launcher): model.action_masks: np.ones([2, 2]), } sess.run(run_list, feed_dict=feed_dict) - env.close() -@mock.patch("mlagents.envs.environment.UnityEnvironment.executable_launcher") -@mock.patch("mlagents.envs.environment.UnityEnvironment.get_communicator") -def 
test_sac_model_dc_vector_rnn(mock_communicator, mock_launcher): +def test_sac_model_dc_vector_rnn(): tf.reset_default_graph() with tf.Session() as sess: with tf.variable_scope("FakeGraphScope"): - mock_communicator.return_value = MockCommunicator( - discrete_action=True, visual_inputs=0 - ) - env = UnityEnvironment(" ") memory_size = 128 model = SACModel( - env.brains["RealFakeBrain"], use_recurrent=True, m_size=memory_size + make_brain_parameters(discrete_action=True, visual_inputs=0), + use_recurrent=True, + m_size=memory_size, ) init = tf.global_variables_initializer() sess.run(init) @@ -322,22 +302,17 @@ def test_sac_model_dc_vector_rnn(mock_communicator, mock_launcher): model.action_masks: np.ones([1, 2]), } sess.run(run_list, feed_dict=feed_dict) - env.close() -@mock.patch("mlagents.envs.environment.UnityEnvironment.executable_launcher") -@mock.patch("mlagents.envs.environment.UnityEnvironment.get_communicator") -def test_sac_model_cc_vector_rnn(mock_communicator, mock_launcher): +def test_sac_model_cc_vector_rnn(): tf.reset_default_graph() with tf.Session() as sess: with tf.variable_scope("FakeGraphScope"): - mock_communicator.return_value = MockCommunicator( - discrete_action=False, visual_inputs=0 - ) - env = UnityEnvironment(" ") memory_size = 128 model = SACModel( - env.brains["RealFakeBrain"], use_recurrent=True, m_size=memory_size + make_brain_parameters(discrete_action=False, visual_inputs=0), + use_recurrent=True, + m_size=memory_size, ) init = tf.global_variables_initializer() sess.run(init) @@ -357,7 +332,6 @@ def test_sac_model_cc_vector_rnn(mock_communicator, mock_launcher): model.vector_in: np.array([[1, 2, 3, 1, 2, 3], [3, 4, 5, 3, 4, 5]]), } sess.run(run_list, feed_dict=feed_dict) - env.close() def test_sac_save_load_buffer(tmpdir): diff --git a/ml-agents/mlagents/trainers/tests/test_simple_rl.py b/ml-agents/mlagents/trainers/tests/test_simple_rl.py index 082b711742..339fc7287b 100644 --- a/ml-agents/mlagents/trainers/tests/test_simple_rl.py 
+++ b/ml-agents/mlagents/trainers/tests/test_simple_rl.py @@ -7,10 +7,10 @@ from mlagents.trainers.trainer_controller import TrainerController -from mlagents.trainers.trainer_util import initialize_trainers +from mlagents.trainers.trainer_util import TrainerFactory from mlagents.envs.base_unity_environment import BaseUnityEnvironment from mlagents.envs.brain import BrainInfo, AllBrainInfo, BrainParameters -from mlagents.envs.communicator_objects.agent_info_proto_pb2 import AgentInfoProto +from mlagents.envs.communicator_objects.agent_info_pb2 import AgentInfoProto from mlagents.envs.simple_env_manager import SimpleEnvManager from mlagents.envs.sampler_class import SamplerManager @@ -192,9 +192,8 @@ def _check_environment_trains(env, config): trainer_config = yaml.safe_load(config) env_manager = SimpleEnvManager(env) - trainers = initialize_trainers( + trainer_factory = TrainerFactory( trainer_config=trainer_config, - external_brains=env_manager.external_brains, summaries_dir=dir, run_id=run_id, model_path=dir, @@ -207,7 +206,7 @@ def _check_environment_trains(env, config): ) tc = TrainerController( - trainers=trainers, + trainer_factory=trainer_factory, summaries_dir=dir, model_path=dir, run_id=run_id, diff --git a/ml-agents/mlagents/trainers/tests/test_trainer_controller.py b/ml-agents/mlagents/trainers/tests/test_trainer_controller.py index 5cf9f2e149..3a918afc52 100644 --- a/ml-agents/mlagents/trainers/tests/test_trainer_controller.py +++ b/ml-agents/mlagents/trainers/tests/test_trainer_controller.py @@ -41,6 +41,7 @@ def dummy_config(): @pytest.fixture def basic_trainer_controller(): return TrainerController( + trainer_factory=None, model_path="test_model_path", summaries_dir="test_summaries_dir", run_id="test_run_id", @@ -51,7 +52,6 @@ def basic_trainer_controller(): fast_simulation=True, sampler_manager=SamplerManager({}), resampling_interval=None, - trainers={}, ) @@ -60,6 +60,7 @@ def basic_trainer_controller(): def 
test_initialization_seed(numpy_random_seed, tensorflow_set_seed): seed = 27 TrainerController( + trainer_factory=None, model_path="", summaries_dir="", run_id="1", @@ -70,7 +71,6 @@ def test_initialization_seed(numpy_random_seed, tensorflow_set_seed): fast_simulation=True, sampler_manager=SamplerManager({}), resampling_interval=None, - trainers={}, ) numpy_random_seed.assert_called_with(seed) tensorflow_set_seed.assert_called_with(seed) @@ -158,8 +158,10 @@ def trainer_controller_with_take_step_mocks(): def test_take_step_adds_experiences_to_trainer_and_trains(): tc, trainer_mock = trainer_controller_with_take_step_mocks() - old_step_info = EnvironmentStep(Mock(), Mock(), MagicMock()) - new_step_info = EnvironmentStep(Mock(), Mock(), MagicMock()) + action_info_dict = {"testbrain": MagicMock()} + + old_step_info = EnvironmentStep(Mock(), Mock(), action_info_dict) + new_step_info = EnvironmentStep(Mock(), Mock(), action_info_dict) trainer_mock.is_ready_update = MagicMock(return_value=True) env_mock = MagicMock() @@ -179,3 +181,31 @@ def test_take_step_adds_experiences_to_trainer_and_trains(): ) trainer_mock.update_policy.assert_called_once() trainer_mock.increment_step.assert_called_once() + + +def test_take_step_if_not_training(): + tc, trainer_mock = trainer_controller_with_take_step_mocks() + tc.train_model = False + + action_info_dict = {"testbrain": MagicMock()} + + old_step_info = EnvironmentStep(Mock(), Mock(), action_info_dict) + new_step_info = EnvironmentStep(Mock(), Mock(), action_info_dict) + trainer_mock.is_ready_update = MagicMock(return_value=False) + + env_mock = MagicMock() + env_mock.step.return_value = [new_step_info] + env_mock.reset.return_value = [old_step_info] + + tc.advance(env_mock) + env_mock.reset.assert_not_called() + env_mock.step.assert_called_once() + trainer_mock.add_experiences.assert_called_once_with( + new_step_info.previous_all_brain_info, + new_step_info.current_all_brain_info, + 
new_step_info.brain_name_to_action_info["testbrain"].outputs, + ) + trainer_mock.process_experiences.assert_called_once_with( + new_step_info.previous_all_brain_info, new_step_info.current_all_brain_info + ) + trainer_mock.clear_update_buffer.assert_called_once() diff --git a/ml-agents/mlagents/trainers/tests/test_trainer_util.py b/ml-agents/mlagents/trainers/tests/test_trainer_util.py index 61b3d910d5..a17da76867 100644 --- a/ml-agents/mlagents/trainers/tests/test_trainer_util.py +++ b/ml-agents/mlagents/trainers/tests/test_trainer_util.py @@ -9,7 +9,6 @@ from mlagents.trainers.trainer_metrics import TrainerMetrics from mlagents.trainers.ppo.trainer import PPOTrainer from mlagents.trainers.bc.offline_trainer import OfflineBCTrainer -from mlagents.trainers.bc.online_trainer import OnlineBCTrainer from mlagents.envs.exception import UnityEnvironmentException @@ -43,38 +42,6 @@ def dummy_config(): ) -@pytest.fixture -def dummy_online_bc_config(): - return yaml.safe_load( - """ - default: - trainer: online_bc - brain_to_imitate: ExpertBrain - batches_per_epoch: 16 - batch_size: 32 - beta: 5.0e-3 - buffer_size: 512 - epsilon: 0.2 - gamma: 0.99 - hidden_units: 128 - lambd: 0.95 - learning_rate: 3.0e-4 - max_steps: 5.0e4 - normalize: true - num_epoch: 5 - num_layers: 2 - time_horizon: 64 - sequence_length: 64 - summary_freq: 1000 - use_recurrent: false - memory_size: 8 - use_curiosity: false - curiosity_strength: 0.0 - curiosity_enc_size: 1 - """ - ) - - @pytest.fixture def dummy_offline_bc_config(): return yaml.safe_load( @@ -166,6 +133,7 @@ def test_initialize_trainer_parameters_override_defaults(BrainParametersMock): expected_config["normalize"] = False brain_params_mock = BrainParametersMock() + BrainParametersMock.return_value.brain_name = "testbrain" external_brains = {"testbrain": brain_params_mock} def mock_constructor(self, brain, trainer_parameters, training, load, seed, run_id): @@ -177,9 +145,8 @@ def mock_constructor(self, brain, trainer_parameters, 
training, load, seed, run_ assert run_id == run_id with patch.object(OfflineBCTrainer, "__init__", mock_constructor): - trainers = trainer_util.initialize_trainers( + trainer_factory = trainer_util.TrainerFactory( trainer_config=base_config, - external_brains=external_brains, summaries_dir=summaries_dir, run_id=run_id, model_path=model_path, @@ -188,56 +155,17 @@ def mock_constructor(self, brain, trainer_parameters, training, load, seed, run_ load_model=load_model, seed=seed, ) + trainers = {} + for _, brain_parameters in external_brains.items(): + trainers["testbrain"] = trainer_factory.generate(brain_parameters) assert "testbrain" in trainers assert isinstance(trainers["testbrain"], OfflineBCTrainer) -@patch("mlagents.envs.brain.BrainParameters") -def test_initialize_online_bc_trainer(BrainParametersMock): - summaries_dir = "test_dir" - run_id = "testrun" - model_path = "model_dir" - keep_checkpoints = 1 - train_model = True - load_model = False - seed = 11 - - base_config = dummy_online_bc_config() - expected_config = base_config["default"] - expected_config["summary_path"] = summaries_dir + f"/{run_id}_testbrain" - expected_config["model_path"] = model_path + "/testbrain" - expected_config["keep_checkpoints"] = keep_checkpoints - - brain_params_mock = BrainParametersMock() - external_brains = {"testbrain": brain_params_mock} - - def mock_constructor(self, brain, trainer_parameters, training, load, seed, run_id): - assert brain == brain_params_mock - assert trainer_parameters == expected_config - assert training == train_model - assert load == load_model - assert seed == seed - assert run_id == run_id - - with patch.object(OnlineBCTrainer, "__init__", mock_constructor): - trainers = trainer_util.initialize_trainers( - trainer_config=base_config, - external_brains=external_brains, - summaries_dir=summaries_dir, - run_id=run_id, - model_path=model_path, - keep_checkpoints=keep_checkpoints, - train_model=train_model, - load_model=load_model, - seed=seed, - ) - 
assert "testbrain" in trainers - assert isinstance(trainers["testbrain"], OnlineBCTrainer) - - @patch("mlagents.envs.brain.BrainParameters") def test_initialize_ppo_trainer(BrainParametersMock): brain_params_mock = BrainParametersMock() + BrainParametersMock.return_value.brain_name = "testbrain" external_brains = {"testbrain": BrainParametersMock()} summaries_dir = "test_dir" run_id = "testrun" @@ -276,9 +204,8 @@ def mock_constructor( assert multi_gpu == multi_gpu with patch.object(PPOTrainer, "__init__", mock_constructor): - trainers = trainer_util.initialize_trainers( + trainer_factory = trainer_util.TrainerFactory( trainer_config=base_config, - external_brains=external_brains, summaries_dir=summaries_dir, run_id=run_id, model_path=model_path, @@ -287,6 +214,9 @@ def mock_constructor( load_model=load_model, seed=seed, ) + trainers = {} + for brain_name, brain_parameters in external_brains.items(): + trainers[brain_name] = trainer_factory.generate(brain_parameters) assert "testbrain" in trainers assert isinstance(trainers["testbrain"], PPOTrainer) @@ -301,12 +231,12 @@ def test_initialize_invalid_trainer_raises_exception(BrainParametersMock): load_model = False seed = 11 bad_config = dummy_bad_config() + BrainParametersMock.return_value.brain_name = "testbrain" external_brains = {"testbrain": BrainParametersMock()} with pytest.raises(UnityEnvironmentException): - trainer_util.initialize_trainers( + trainer_factory = trainer_util.TrainerFactory( trainer_config=bad_config, - external_brains=external_brains, summaries_dir=summaries_dir, run_id=run_id, model_path=model_path, @@ -315,6 +245,9 @@ def test_initialize_invalid_trainer_raises_exception(BrainParametersMock): load_model=load_model, seed=seed, ) + trainers = {} + for brain_name, brain_parameters in external_brains.items(): + trainers[brain_name] = trainer_factory.generate(brain_parameters) def test_load_config_missing_file(): diff --git a/ml-agents/mlagents/trainers/tests/testdcvis.demo 
b/ml-agents/mlagents/trainers/tests/testdcvis.demo index b46b1c664b..3933a3920f 100644 Binary files a/ml-agents/mlagents/trainers/tests/testdcvis.demo and b/ml-agents/mlagents/trainers/tests/testdcvis.demo differ diff --git a/ml-agents/mlagents/trainers/trainer_controller.py b/ml-agents/mlagents/trainers/trainer_controller.py index 890eeb0d9c..6007807d59 100644 --- a/ml-agents/mlagents/trainers/trainer_controller.py +++ b/ml-agents/mlagents/trainers/trainer_controller.py @@ -5,7 +5,7 @@ import os import json import logging -from typing import Dict, List, Optional +from typing import Dict, List, Optional, Set import numpy as np import tensorflow as tf @@ -21,12 +21,13 @@ from mlagents.envs.timers import hierarchical_timer, get_timer_tree, timed from mlagents.trainers.trainer import Trainer, TrainerMetrics from mlagents.trainers.meta_curriculum import MetaCurriculum +from mlagents.trainers.trainer_util import TrainerFactory class TrainerController(object): def __init__( self, - trainers: Dict[str, Trainer], + trainer_factory: TrainerFactory, model_path: str, summaries_dir: str, run_id: str, @@ -39,7 +40,6 @@ def __init__( resampling_interval: Optional[int], ): """ - :param trainers: Trainers for each brain to train. :param model_path: Path to save the model. :param summaries_dir: Folder to save training summaries. :param run_id: The sub-directory name for model and summary statistics @@ -50,7 +50,8 @@ def __init__( :param sampler_manager: SamplerManager object handles samplers for resampling the reset parameters. :param resampling_interval: Specifies number of simulation steps after which reset parameters are resampled. 
""" - self.trainers = trainers + self.trainers: Dict[str, Trainer] = {} + self.trainer_factory = trainer_factory self.model_path = model_path self.summaries_dir = summaries_dir self.logger = logging.getLogger("mlagents.envs") @@ -162,9 +163,9 @@ def _should_save_model(self, global_step: int) -> bool: def _not_done_training(self) -> bool: return ( - any([t.get_step <= t.get_max_steps for k, t in self.trainers.items()]) + any(t.get_step <= t.get_max_steps for k, t in self.trainers.items()) or not self.train_model - ) + ) or len(self.trainers) == 0 def write_to_tensorboard(self, global_step: int) -> None: for brain_name, trainer in self.trainers.items(): @@ -181,24 +182,30 @@ def write_to_tensorboard(self, global_step: int) -> None: else: trainer.write_summary(global_step, delta_train_start) + def start_trainer(self, trainer: Trainer, env_manager: EnvManager) -> None: + self.trainers[trainer.brain_name] = trainer + self.logger.info(trainer) + if self.train_model: + trainer.write_tensorboard_text("Hyperparameters", trainer.parameters) + env_manager.set_policy(trainer.brain_name, trainer.policy) + def start_learning(self, env_manager: EnvManager) -> None: self._create_model_path(self.model_path) - tf.reset_default_graph() - - for _, t in self.trainers.items(): - self.logger.info(t) - global_step = 0 - - if self.train_model: - for brain_name, trainer in self.trainers.items(): - trainer.write_tensorboard_text("Hyperparameters", trainer.parameters) + last_brain_names: Set[str] = set() try: - for brain_name, trainer in self.trainers.items(): - env_manager.set_policy(brain_name, trainer.policy) self._reset_env(env_manager) while self._not_done_training(): + external_brains = set(env_manager.external_brains.keys()) + new_brains = external_brains - last_brain_names + if last_brain_names != env_manager.external_brains.keys(): + for name in new_brains: + trainer = self.trainer_factory.generate( + env_manager.external_brains[name] + ) + self.start_trainer(trainer, env_manager) + 
last_brain_names = external_brains n_steps = self.advance(env_manager) for i in range(n_steps): global_step += 1 @@ -245,14 +252,11 @@ def reset_env_if_ready(self, env: EnvManager, steps: int) -> None: ) else: lessons_incremented = {} - # If any lessons were incremented or the environment is # ready to be reset meta_curriculum_reset = any(lessons_incremented.values()) - # Check if we are performing generalization training and we have finished the # specified number of steps for the lesson - generalization_reset = ( not self.sampler_manager.is_empty() and (steps != 0) @@ -268,19 +272,20 @@ def advance(self, env: EnvManager) -> int: time_start_step = time() new_step_infos = env.step() delta_time_step = time() - time_start_step - for step_info in new_step_infos: for brain_name, trainer in self.trainers.items(): if brain_name in self.trainer_metrics: self.trainer_metrics[brain_name].add_delta_step(delta_time_step) - trainer.add_experiences( - step_info.previous_all_brain_info, - step_info.current_all_brain_info, - step_info.brain_name_to_action_info[brain_name].outputs, - ) - trainer.process_experiences( - step_info.previous_all_brain_info, step_info.current_all_brain_info - ) + if brain_name in step_info.brain_name_to_action_info: + trainer.add_experiences( + step_info.previous_all_brain_info, + step_info.current_all_brain_info, + step_info.brain_name_to_action_info[brain_name].outputs, + ) + trainer.process_experiences( + step_info.previous_all_brain_info, + step_info.current_all_brain_info, + ) for brain_name, trainer in self.trainers.items(): if brain_name in self.trainer_metrics: self.trainer_metrics[brain_name].add_delta_step(delta_time_step) @@ -291,4 +296,7 @@ def advance(self, env: EnvManager) -> int: with hierarchical_timer("update_policy"): trainer.update_policy() env.set_policy(brain_name, trainer.policy) + else: + # Avoid memory leak during inference + trainer.clear_update_buffer() return len(new_step_infos) diff --git 
a/ml-agents/mlagents/trainers/trainer_util.py b/ml-agents/mlagents/trainers/trainer_util.py index 4c4f26519a..7539a8acba 100644 --- a/ml-agents/mlagents/trainers/trainer_util.py +++ b/ml-agents/mlagents/trainers/trainer_util.py @@ -8,12 +8,52 @@ from mlagents.trainers.ppo.trainer import PPOTrainer from mlagents.trainers.sac.trainer import SACTrainer from mlagents.trainers.bc.offline_trainer import OfflineBCTrainer -from mlagents.trainers.bc.online_trainer import OnlineBCTrainer -def initialize_trainers( - trainer_config: Dict[str, Any], - external_brains: Dict[str, BrainParameters], +class TrainerFactory: + def __init__( + self, + trainer_config: Any, + summaries_dir: str, + run_id: str, + model_path: str, + keep_checkpoints: int, + train_model: bool, + load_model: bool, + seed: int, + meta_curriculum: MetaCurriculum = None, + multi_gpu: bool = False, + ): + self.trainer_config = trainer_config + self.summaries_dir = summaries_dir + self.run_id = run_id + self.model_path = model_path + self.keep_checkpoints = keep_checkpoints + self.train_model = train_model + self.load_model = load_model + self.seed = seed + self.meta_curriculum = meta_curriculum + self.multi_gpu = multi_gpu + + def generate(self, brain_parameters: BrainParameters) -> Trainer: + return initialize_trainer( + self.trainer_config, + brain_parameters, + self.summaries_dir, + self.run_id, + self.model_path, + self.keep_checkpoints, + self.train_model, + self.load_model, + self.seed, + self.meta_curriculum, + self.multi_gpu, + ) + + +def initialize_trainer( + trainer_config: Any, + brain_parameters: BrainParameters, summaries_dir: str, run_id: str, model_path: str, @@ -23,13 +63,13 @@ def initialize_trainers( seed: int, meta_curriculum: MetaCurriculum = None, multi_gpu: bool = False, -) -> Dict[str, Trainer]: +) -> Trainer: """ - Initializes trainers given a provided trainer configuration and set of brains from the environment, as well as + Initializes a trainer given a provided trainer configuration 
and brain parameters, as well as some general training session options. :param trainer_config: Original trainer configuration loaded from YAML - :param external_brains: BrainParameters provided by the Unity environment + :param brain_parameters: BrainParameters provided by the Unity environment :param summaries_dir: Directory to store trainer summary statistics :param run_id: Run ID to associate with this training run :param model_path: Path to save the model @@ -41,74 +81,58 @@ def initialize_trainers( :param multi_gpu: Whether to use multi-GPU training :return: """ - trainers: Dict[str, Trainer] = {} - trainer_parameters_dict = {} - for brain_name in external_brains: - trainer_parameters = trainer_config["default"].copy() - trainer_parameters["summary_path"] = "{basedir}/{name}".format( - basedir=summaries_dir, name=str(run_id) + "_" + brain_name + trainer_parameters = trainer_config["default"].copy() + brain_name = brain_parameters.brain_name + trainer_parameters["summary_path"] = "{basedir}/{name}".format( + basedir=summaries_dir, name=str(run_id) + "_" + brain_name + ) + trainer_parameters["model_path"] = "{basedir}/{name}".format( + basedir=model_path, name=brain_name + ) + trainer_parameters["keep_checkpoints"] = keep_checkpoints + if brain_name in trainer_config: + _brain_key: Any = brain_name + while not isinstance(trainer_config[_brain_key], dict): + _brain_key = trainer_config[_brain_key] + trainer_parameters.update(trainer_config[_brain_key]) + + trainer = None + if trainer_parameters["trainer"] == "offline_bc": + trainer = OfflineBCTrainer( + brain_parameters, trainer_parameters, train_model, load_model, seed, run_id + ) + elif trainer_parameters["trainer"] == "ppo": + trainer = PPOTrainer( + brain_parameters, + meta_curriculum.brains_to_curriculums[brain_name].min_lesson_length + if meta_curriculum + else 1, + trainer_parameters, + train_model, + load_model, + seed, + run_id, + multi_gpu, ) - trainer_parameters["model_path"] = 
"{basedir}/{name}".format( - basedir=model_path, name=brain_name + elif trainer_parameters["trainer"] == "sac": + trainer = SACTrainer( + brain_parameters, + meta_curriculum.brains_to_curriculums[brain_name].min_lesson_length + if meta_curriculum + else 1, + trainer_parameters, + train_model, + load_model, + seed, + run_id, + ) + else: + raise UnityEnvironmentException( + "The trainer config contains " + "an unknown trainer type for " + "brain {}".format(brain_name) ) - trainer_parameters["keep_checkpoints"] = keep_checkpoints - if brain_name in trainer_config: - _brain_key: Any = brain_name - while not isinstance(trainer_config[_brain_key], dict): - _brain_key = trainer_config[_brain_key] - trainer_parameters.update(trainer_config[_brain_key]) - trainer_parameters_dict[brain_name] = trainer_parameters.copy() - for brain_name in external_brains: - if trainer_parameters_dict[brain_name]["trainer"] == "offline_bc": - trainers[brain_name] = OfflineBCTrainer( - external_brains[brain_name], - trainer_parameters_dict[brain_name], - train_model, - load_model, - seed, - run_id, - ) - elif trainer_parameters_dict[brain_name]["trainer"] == "online_bc": - trainers[brain_name] = OnlineBCTrainer( - external_brains[brain_name], - trainer_parameters_dict[brain_name], - train_model, - load_model, - seed, - run_id, - ) - elif trainer_parameters_dict[brain_name]["trainer"] == "ppo": - trainers[brain_name] = PPOTrainer( - external_brains[brain_name], - meta_curriculum.brains_to_curriculums[brain_name].min_lesson_length - if meta_curriculum - else 1, - trainer_parameters_dict[brain_name], - train_model, - load_model, - seed, - run_id, - multi_gpu, - ) - elif trainer_parameters_dict[brain_name]["trainer"] == "sac": - trainers[brain_name] = SACTrainer( - external_brains[brain_name], - meta_curriculum.brains_to_curriculums[brain_name].min_lesson_length - if meta_curriculum - else 1, - trainer_parameters_dict[brain_name], - train_model, - load_model, - seed, - run_id, - ) - else: - raise 
UnityEnvironmentException( - "The trainer config contains " - "an unknown trainer type for " - "brain {}".format(brain_name) - ) - return trainers + return trainer def load_config(config_path: str) -> Dict[str, Any]: diff --git a/ml-agents/setup.py b/ml-agents/setup.py index 476071b66e..5211612ee4 100644 --- a/ml-agents/setup.py +++ b/ml-agents/setup.py @@ -1,16 +1,40 @@ -from setuptools import setup, find_namespace_packages -from os import path from io import open +import os +import sys + +from setuptools import setup, find_namespace_packages +from setuptools.command.install import install + +VERSION = "0.11.0" + +here = os.path.abspath(os.path.dirname(__file__)) + + +class VerifyVersionCommand(install): + """ + Custom command to verify that the git tag matches our version + See https://circleci.com/blog/continuously-deploying-python-packages-to-pypi-with-circleci/ + """ + + description = "verify that the git tag matches our version" + + def run(self): + tag = os.getenv("CIRCLE_TAG") + + if tag != VERSION: + info = "Git tag: {0} does not match the version of this app: {1}".format( + tag, VERSION + ) + sys.exit(info) -here = path.abspath(path.dirname(__file__)) # Get the long description from the README file -with open(path.join(here, "README.md"), encoding="utf-8") as f: +with open(os.path.join(here, "README.md"), encoding="utf-8") as f: long_description = f.read() setup( name="mlagents", - version="0.10.1", + version=VERSION, description="Unity Machine Learning Agents", long_description=long_description, long_description_content_type="text/markdown", @@ -35,7 +59,7 @@ "h5py>=2.9.0", "jupyter", "matplotlib", - "mlagents_envs==0.10.1", + "mlagents_envs=={}".format(VERSION), "numpy>=1.13.3,<2.0", "Pillow>=4.2.1", "protobuf>=3.6", @@ -45,4 +69,5 @@ ], python_requires=">=3.6.1", entry_points={"console_scripts": ["mlagents-learn=mlagents.trainers.learn:main"]}, + cmdclass={"verify": VerifyVersionCommand}, ) diff --git a/notebooks/getting-started.ipynb 
b/notebooks/getting-started.ipynb index 8b8b28c606..cf9c28f92c 100755 --- a/notebooks/getting-started.ipynb +++ b/notebooks/getting-started.ipynb @@ -78,7 +78,7 @@ "env = UnityEnvironment(file_name=env_name)\n", "\n", "# Set the default brain to work with\n", - "default_brain = env.brain_names[0]\n", + "default_brain = env.external_brain_names[0]\n", "brain = env.brains[default_brain]" ] }, diff --git a/protobuf-definitions/README.md b/protobuf-definitions/README.md index da3bacbcc0..eb2ef57db7 100644 --- a/protobuf-definitions/README.md +++ b/protobuf-definitions/README.md @@ -20,30 +20,39 @@ Assume the ml-agents repository is checked out to a folder named $MLAGENTS_ROOT. `pip install mypy-protobuf` -If you don't have it already, download the latest version of [nuget](https://www.nuget.org/downloads). -Navigate to your installation of nuget and run the following: -`nuget install Grpc.Tools -Version 1.14.1 -OutputDirectory $MLAGENTS_ROOT\protobuf-definitions` +#### On Windows + +Download and install the latest version of [nuget](https://www.nuget.org/downloads). + +#### On Mac + +`brew install nuget` + +#### On Linux -### Installing Protobuf Compiler +`sudo apt-get install nuget` -On Mac: `brew install protobuf` + +Navigate to your installation of nuget and run the following: + +`nuget install Grpc.Tools -Version 1.14.1 -OutputDirectory $MLAGENTS_ROOT\protobuf-definitions` ## Running Whenever you change the fields of a custom message, you must follow the steps below to create C# and Python files corresponding to the new message. 1. Open a terminal. **Note:** If you're using Anaconda, don't forget to activate the ml-agents environment first. -2. Un-comment line 7 in `make.bat` (for Windows, use `make_for_win.bat`), and set to correct Grpc.Tools sub-directory. -3. Run the `.bat` from the terminal by navigating to `$MLAGENTS_ROOT\protobuf-definitions` and entering `make.bat` (for Windows, use `make_for_win.bat`) +2. 
Un-comment line 7 in `make.sh` (for Windows, use `make_for_win.bat`), and set to correct Grpc.Tools sub-directory. +3. Run the protobuf generation script from the terminal by navigating to `$MLAGENTS_ROOT\protobuf-definitions` and entering `make.sh` (for Windows, use `make_for_win.bat`) 4. Note any errors generated that may result from setting the wrong directory in step 2. -5. In the generated `UnityToExternalGrpc.cs` file in the `$MLAGENTS_ROOT/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects` folder, you will need to add the following to the beginning of the file: +5. In the generated `UnityToExternalGrpc.cs` file in the `$MLAGENTS_ROOT/UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects` folder, check to see if you need to add the following to the beginning of the file: ```csharp # if UNITY_EDITOR || UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX || UNITY_STANDALONE_LINUX ``` and the following line to the end - + ```csharp #endif ``` @@ -62,4 +71,4 @@ pip install -e . mlagents-learn ``` -The final line will test if everything was generated and installed correctly. If it worked, you should see the Unity logo. +The final line will test if everything was generated and installed correctly. If it worked, you should see the Unity logo. 
diff --git a/protobuf-definitions/make.bat b/protobuf-definitions/make.sh similarity index 66% rename from protobuf-definitions/make.bat rename to protobuf-definitions/make.sh index f084c1b9c3..6198dad60d 100755 --- a/protobuf-definitions/make.bat +++ b/protobuf-definitions/make.sh @@ -7,10 +7,9 @@ # COMPILER=[DIRECTORY] SRC_DIR=proto/mlagents/envs/communicator_objects -DST_DIR_C=../UnitySDK/Assets/ML-Agents/Scripts/CommunicatorObjects +DST_DIR_C=../UnitySDK/Assets/ML-Agents/Scripts/Grpc/CommunicatorObjects DST_DIR_P=../ml-agents-envs PROTO_PATH=proto - PYTHON_PACKAGE=mlagents/envs/communicator_objects # clean @@ -21,8 +20,8 @@ mkdir -p $DST_DIR_P/$PYTHON_PACKAGE # generate proto objects in python and C# -protoc --proto_path=proto --csharp_out=$DST_DIR_C $SRC_DIR/*.proto -protoc --proto_path=proto --python_out=$DST_DIR_P --mypy_out=$DST_DIR_P $SRC_DIR/*.proto +$COMPILER/protoc --proto_path=proto --csharp_out=$DST_DIR_C $SRC_DIR/*.proto +$COMPILER/protoc --proto_path=proto --python_out=$DST_DIR_P --mypy_out=$DST_DIR_P $SRC_DIR/*.proto # grpc @@ -41,3 +40,11 @@ FILE=${FILE##*/} echo from .${FILE%.py} import \* >> $DST_DIR_P/$PYTHON_PACKAGE/__init__.py done +# Surround UnityToExternal.cs file with macro +echo "#if UNITY_EDITOR || UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX || UNITY_STANDALONE_LINUX +`cat $DST_DIR_C/UnityToExternalGrpc.cs` +#endif" > $DST_DIR_C/UnityToExternalGrpc.cs + +# Remove the __init__.py file since it is not needed +rm $DST_DIR_P/$PYTHON_PACKAGE/__init__.py +touch $DST_DIR_P/$PYTHON_PACKAGE/__init__.py diff --git a/protobuf-definitions/make_for_win.bat b/protobuf-definitions/make_for_win.bat index 3c729465cd..da7302e75f 100644 --- a/protobuf-definitions/make_for_win.bat +++ b/protobuf-definitions/make_for_win.bat @@ -7,7 +7,7 @@ rem set COMPILER=Grpc.Tools.1.14.1\tools\windows_x64 rem set COMPILER=[DIRECTORY] set SRC_DIR=proto\mlagents\envs\communicator_objects -set DST_DIR_C=..\UnitySDK\Assets\ML-Agents\Scripts\CommunicatorObjects +set 
DST_DIR_C=..\UnitySDK\Assets\ML-Agents\Scripts\Grpc\CommunicatorObjects set DST_DIR_P=..\ml-agents-envs set PROTO_PATH=proto diff --git a/protobuf-definitions/proto/mlagents/envs/communicator_objects/agent_action_proto.proto b/protobuf-definitions/proto/mlagents/envs/communicator_objects/agent_action.proto similarity index 88% rename from protobuf-definitions/proto/mlagents/envs/communicator_objects/agent_action_proto.proto rename to protobuf-definitions/proto/mlagents/envs/communicator_objects/agent_action.proto index 03809d9b75..05a00a7f78 100644 --- a/protobuf-definitions/proto/mlagents/envs/communicator_objects/agent_action_proto.proto +++ b/protobuf-definitions/proto/mlagents/envs/communicator_objects/agent_action.proto @@ -10,5 +10,5 @@ message AgentActionProto { string text_actions = 2; repeated float memories = 3; float value = 4; - CustomAction custom_action = 5; + CustomActionProto custom_action = 5; } diff --git a/protobuf-definitions/proto/mlagents/envs/communicator_objects/agent_info_proto.proto b/protobuf-definitions/proto/mlagents/envs/communicator_objects/agent_info.proto similarity index 66% rename from protobuf-definitions/proto/mlagents/envs/communicator_objects/agent_info_proto.proto rename to protobuf-definitions/proto/mlagents/envs/communicator_objects/agent_info.proto index 716ec7d250..f48130eb63 100644 --- a/protobuf-definitions/proto/mlagents/envs/communicator_objects/agent_info_proto.proto +++ b/protobuf-definitions/proto/mlagents/envs/communicator_objects/agent_info.proto @@ -1,5 +1,6 @@ syntax = "proto3"; +import "mlagents/envs/communicator_objects/compressed_observation.proto"; import "mlagents/envs/communicator_objects/custom_observation.proto"; option csharp_namespace = "MLAgents.CommunicatorObjects"; @@ -7,7 +8,7 @@ package communicator_objects; message AgentInfoProto { repeated float stacked_vector_observation = 1; - repeated bytes visual_observations = 2; + reserved 2; // deprecated repeated bytes visual_observations = 2; string 
text_observation = 3; repeated float stored_vector_actions = 4; string stored_text_actions = 5; @@ -17,5 +18,6 @@ message AgentInfoProto { bool max_step_reached = 9; int32 id = 10; repeated bool action_mask = 11; - CustomObservation custom_observation = 12; + CustomObservationProto custom_observation = 12; + repeated CompressedObservationProto compressed_observations = 13; } diff --git a/protobuf-definitions/proto/mlagents/envs/communicator_objects/brain_parameters_proto.proto b/protobuf-definitions/proto/mlagents/envs/communicator_objects/brain_parameters.proto similarity index 69% rename from protobuf-definitions/proto/mlagents/envs/communicator_objects/brain_parameters_proto.proto rename to protobuf-definitions/proto/mlagents/envs/communicator_objects/brain_parameters.proto index e07cabe987..1b97bba002 100644 --- a/protobuf-definitions/proto/mlagents/envs/communicator_objects/brain_parameters_proto.proto +++ b/protobuf-definitions/proto/mlagents/envs/communicator_objects/brain_parameters.proto @@ -1,7 +1,6 @@ syntax = "proto3"; -import "mlagents/envs/communicator_objects/resolution_proto.proto"; -import "mlagents/envs/communicator_objects/space_type_proto.proto"; +import "mlagents/envs/communicator_objects/space_type.proto"; option csharp_namespace = "MLAgents.CommunicatorObjects"; package communicator_objects; @@ -10,7 +9,7 @@ message BrainParametersProto { int32 vector_observation_size = 1; int32 num_stacked_vector_observations = 2; repeated int32 vector_action_size = 3; - repeated ResolutionProto camera_resolutions = 4; + reserved 4; // deprecated repeated ResolutionProto camera_resolutions repeated string vector_action_descriptions = 5; SpaceTypeProto vector_action_space_type = 6; string brain_name = 7; diff --git a/protobuf-definitions/proto/mlagents/envs/communicator_objects/command_proto.proto b/protobuf-definitions/proto/mlagents/envs/communicator_objects/command.proto similarity index 100% rename from 
protobuf-definitions/proto/mlagents/envs/communicator_objects/command_proto.proto rename to protobuf-definitions/proto/mlagents/envs/communicator_objects/command.proto diff --git a/protobuf-definitions/proto/mlagents/envs/communicator_objects/compressed_observation.proto b/protobuf-definitions/proto/mlagents/envs/communicator_objects/compressed_observation.proto new file mode 100644 index 0000000000..0a6798cd2d --- /dev/null +++ b/protobuf-definitions/proto/mlagents/envs/communicator_objects/compressed_observation.proto @@ -0,0 +1,15 @@ +syntax = "proto3"; + +option csharp_namespace = "MLAgents.CommunicatorObjects"; +package communicator_objects; + +enum CompressionTypeProto { + NONE = 0; + PNG = 1; +} + +message CompressedObservationProto { + repeated int32 shape = 1; + CompressionTypeProto compression_type = 2; + bytes data = 3; +} diff --git a/protobuf-definitions/proto/mlagents/envs/communicator_objects/custom_action.proto b/protobuf-definitions/proto/mlagents/envs/communicator_objects/custom_action.proto index 2cbd774d7d..257adb46a0 100644 --- a/protobuf-definitions/proto/mlagents/envs/communicator_objects/custom_action.proto +++ b/protobuf-definitions/proto/mlagents/envs/communicator_objects/custom_action.proto @@ -3,5 +3,5 @@ syntax = "proto3"; option csharp_namespace = "MLAgents.CommunicatorObjects"; package communicator_objects; -message CustomAction { +message CustomActionProto { } diff --git a/protobuf-definitions/proto/mlagents/envs/communicator_objects/custom_observation.proto b/protobuf-definitions/proto/mlagents/envs/communicator_objects/custom_observation.proto index 21b331f216..37203b66cb 100644 --- a/protobuf-definitions/proto/mlagents/envs/communicator_objects/custom_observation.proto +++ b/protobuf-definitions/proto/mlagents/envs/communicator_objects/custom_observation.proto @@ -3,5 +3,5 @@ syntax = "proto3"; option csharp_namespace = "MLAgents.CommunicatorObjects"; package communicator_objects; -message CustomObservation { +message 
CustomObservationProto { } diff --git a/protobuf-definitions/proto/mlagents/envs/communicator_objects/custom_reset_parameters.proto b/protobuf-definitions/proto/mlagents/envs/communicator_objects/custom_reset_parameters.proto index 89daeda531..f321ffccd8 100644 --- a/protobuf-definitions/proto/mlagents/envs/communicator_objects/custom_reset_parameters.proto +++ b/protobuf-definitions/proto/mlagents/envs/communicator_objects/custom_reset_parameters.proto @@ -3,5 +3,5 @@ syntax = "proto3"; option csharp_namespace = "MLAgents.CommunicatorObjects"; package communicator_objects; -message CustomResetParameters { +message CustomResetParametersProto { } diff --git a/protobuf-definitions/proto/mlagents/envs/communicator_objects/demonstration_meta_proto.proto b/protobuf-definitions/proto/mlagents/envs/communicator_objects/demonstration_meta.proto similarity index 100% rename from protobuf-definitions/proto/mlagents/envs/communicator_objects/demonstration_meta_proto.proto rename to protobuf-definitions/proto/mlagents/envs/communicator_objects/demonstration_meta.proto diff --git a/protobuf-definitions/proto/mlagents/envs/communicator_objects/engine_configuration_proto.proto b/protobuf-definitions/proto/mlagents/envs/communicator_objects/engine_configuration.proto similarity index 100% rename from protobuf-definitions/proto/mlagents/envs/communicator_objects/engine_configuration_proto.proto rename to protobuf-definitions/proto/mlagents/envs/communicator_objects/engine_configuration.proto diff --git a/protobuf-definitions/proto/mlagents/envs/communicator_objects/environment_parameters_proto.proto b/protobuf-definitions/proto/mlagents/envs/communicator_objects/environment_parameters.proto similarity index 81% rename from protobuf-definitions/proto/mlagents/envs/communicator_objects/environment_parameters_proto.proto rename to protobuf-definitions/proto/mlagents/envs/communicator_objects/environment_parameters.proto index 0efee4e242..3843ae54c2 100644 --- 
a/protobuf-definitions/proto/mlagents/envs/communicator_objects/environment_parameters_proto.proto +++ b/protobuf-definitions/proto/mlagents/envs/communicator_objects/environment_parameters.proto @@ -7,5 +7,5 @@ package communicator_objects; message EnvironmentParametersProto { map float_parameters = 1; - CustomResetParameters custom_reset_parameters = 2; + CustomResetParametersProto custom_reset_parameters = 2; } diff --git a/protobuf-definitions/proto/mlagents/envs/communicator_objects/header.proto b/protobuf-definitions/proto/mlagents/envs/communicator_objects/header.proto index 7fd3711678..26478347bf 100644 --- a/protobuf-definitions/proto/mlagents/envs/communicator_objects/header.proto +++ b/protobuf-definitions/proto/mlagents/envs/communicator_objects/header.proto @@ -3,7 +3,7 @@ syntax = "proto3"; option csharp_namespace = "MLAgents.CommunicatorObjects"; package communicator_objects; -message Header { +message HeaderProto { int32 status = 1; string message = 2; } diff --git a/protobuf-definitions/proto/mlagents/envs/communicator_objects/resolution_proto.proto b/protobuf-definitions/proto/mlagents/envs/communicator_objects/resolution_proto.proto deleted file mode 100644 index f17ba352b9..0000000000 --- a/protobuf-definitions/proto/mlagents/envs/communicator_objects/resolution_proto.proto +++ /dev/null @@ -1,11 +0,0 @@ -syntax = "proto3"; - -option csharp_namespace = "MLAgents.CommunicatorObjects"; -package communicator_objects; - -message ResolutionProto { - int32 width = 1; - int32 height = 2; - bool gray_scale = 3; -} - diff --git a/protobuf-definitions/proto/mlagents/envs/communicator_objects/space_type_proto.proto b/protobuf-definitions/proto/mlagents/envs/communicator_objects/space_type.proto similarity index 71% rename from protobuf-definitions/proto/mlagents/envs/communicator_objects/space_type_proto.proto rename to protobuf-definitions/proto/mlagents/envs/communicator_objects/space_type.proto index d4b6822dec..80e360e5d1 100644 --- 
a/protobuf-definitions/proto/mlagents/envs/communicator_objects/space_type_proto.proto +++ b/protobuf-definitions/proto/mlagents/envs/communicator_objects/space_type.proto @@ -1,7 +1,5 @@ syntax = "proto3"; -import "mlagents/envs/communicator_objects/resolution_proto.proto"; - option csharp_namespace = "MLAgents.CommunicatorObjects"; package communicator_objects; diff --git a/protobuf-definitions/proto/mlagents/envs/communicator_objects/unity_input.proto b/protobuf-definitions/proto/mlagents/envs/communicator_objects/unity_input.proto index 25acc91518..ab55b00039 100644 --- a/protobuf-definitions/proto/mlagents/envs/communicator_objects/unity_input.proto +++ b/protobuf-definitions/proto/mlagents/envs/communicator_objects/unity_input.proto @@ -6,9 +6,9 @@ import "mlagents/envs/communicator_objects/unity_rl_initialization_input.proto"; option csharp_namespace = "MLAgents.CommunicatorObjects"; package communicator_objects; -message UnityInput { - UnityRLInput rl_input = 1; - UnityRLInitializationInput rl_initialization_input = 2; +message UnityInputProto { + UnityRLInputProto rl_input = 1; + UnityRLInitializationInputProto rl_initialization_input = 2; //More messages can be added here } diff --git a/protobuf-definitions/proto/mlagents/envs/communicator_objects/unity_message.proto b/protobuf-definitions/proto/mlagents/envs/communicator_objects/unity_message.proto index f1a14f5568..bda0452449 100644 --- a/protobuf-definitions/proto/mlagents/envs/communicator_objects/unity_message.proto +++ b/protobuf-definitions/proto/mlagents/envs/communicator_objects/unity_message.proto @@ -7,9 +7,9 @@ import "mlagents/envs/communicator_objects/header.proto"; option csharp_namespace = "MLAgents.CommunicatorObjects"; package communicator_objects; -message UnityMessage { - Header header = 1; - UnityOutput unity_output = 2; - UnityInput unity_input = 3; +message UnityMessageProto { + HeaderProto header = 1; + UnityOutputProto unity_output = 2; + UnityInputProto unity_input = 3; } diff 
--git a/protobuf-definitions/proto/mlagents/envs/communicator_objects/unity_output.proto b/protobuf-definitions/proto/mlagents/envs/communicator_objects/unity_output.proto index edbd70f802..e46f26f84f 100644 --- a/protobuf-definitions/proto/mlagents/envs/communicator_objects/unity_output.proto +++ b/protobuf-definitions/proto/mlagents/envs/communicator_objects/unity_output.proto @@ -6,9 +6,9 @@ import "mlagents/envs/communicator_objects/unity_rl_initialization_output.proto" option csharp_namespace = "MLAgents.CommunicatorObjects"; package communicator_objects; -message UnityOutput { - UnityRLOutput rl_output = 1; - UnityRLInitializationOutput rl_initialization_output = 2; +message UnityOutputProto { + UnityRLOutputProto rl_output = 1; + UnityRLInitializationOutputProto rl_initialization_output = 2; //More messages can be added here } diff --git a/protobuf-definitions/proto/mlagents/envs/communicator_objects/unity_rl_initialization_input.proto b/protobuf-definitions/proto/mlagents/envs/communicator_objects/unity_rl_initialization_input.proto index d1d9e334e7..d46b050d71 100644 --- a/protobuf-definitions/proto/mlagents/envs/communicator_objects/unity_rl_initialization_input.proto +++ b/protobuf-definitions/proto/mlagents/envs/communicator_objects/unity_rl_initialization_input.proto @@ -4,6 +4,6 @@ option csharp_namespace = "MLAgents.CommunicatorObjects"; package communicator_objects; -message UnityRLInitializationInput { +message UnityRLInitializationInputProto { int32 seed = 1; } diff --git a/protobuf-definitions/proto/mlagents/envs/communicator_objects/unity_rl_initialization_output.proto b/protobuf-definitions/proto/mlagents/envs/communicator_objects/unity_rl_initialization_output.proto index 68819d0d65..0b9ff12c70 100644 --- a/protobuf-definitions/proto/mlagents/envs/communicator_objects/unity_rl_initialization_output.proto +++ b/protobuf-definitions/proto/mlagents/envs/communicator_objects/unity_rl_initialization_output.proto @@ -1,13 +1,13 @@ syntax = "proto3"; 
-import "mlagents/envs/communicator_objects/brain_parameters_proto.proto"; -import "mlagents/envs/communicator_objects/environment_parameters_proto.proto"; +import "mlagents/envs/communicator_objects/brain_parameters.proto"; +import "mlagents/envs/communicator_objects/environment_parameters.proto"; option csharp_namespace = "MLAgents.CommunicatorObjects"; package communicator_objects; // The request message containing the academy's parameters. -message UnityRLInitializationOutput { +message UnityRLInitializationOutputProto { string name = 1; string version = 2; string log_path = 3; diff --git a/protobuf-definitions/proto/mlagents/envs/communicator_objects/unity_rl_input.proto b/protobuf-definitions/proto/mlagents/envs/communicator_objects/unity_rl_input.proto index e32f3d1abf..be7b7739d9 100644 --- a/protobuf-definitions/proto/mlagents/envs/communicator_objects/unity_rl_input.proto +++ b/protobuf-definitions/proto/mlagents/envs/communicator_objects/unity_rl_input.proto @@ -1,13 +1,13 @@ syntax = "proto3"; -import "mlagents/envs/communicator_objects/agent_action_proto.proto"; -import "mlagents/envs/communicator_objects/environment_parameters_proto.proto"; -import "mlagents/envs/communicator_objects/command_proto.proto"; +import "mlagents/envs/communicator_objects/agent_action.proto"; +import "mlagents/envs/communicator_objects/environment_parameters.proto"; +import "mlagents/envs/communicator_objects/command.proto"; option csharp_namespace = "MLAgents.CommunicatorObjects"; package communicator_objects; -message UnityRLInput { +message UnityRLInputProto { message ListAgentActionProto { repeated AgentActionProto value = 1; } diff --git a/protobuf-definitions/proto/mlagents/envs/communicator_objects/unity_rl_output.proto b/protobuf-definitions/proto/mlagents/envs/communicator_objects/unity_rl_output.proto index 9ce58568f5..bd169928ff 100644 --- a/protobuf-definitions/proto/mlagents/envs/communicator_objects/unity_rl_output.proto +++ 
b/protobuf-definitions/proto/mlagents/envs/communicator_objects/unity_rl_output.proto @@ -1,11 +1,11 @@ syntax = "proto3"; -import "mlagents/envs/communicator_objects/agent_info_proto.proto"; +import "mlagents/envs/communicator_objects/agent_info.proto"; option csharp_namespace = "MLAgents.CommunicatorObjects"; package communicator_objects; -message UnityRLOutput { +message UnityRLOutputProto { message ListAgentInfoProto { repeated AgentInfoProto value = 1; } diff --git a/protobuf-definitions/proto/mlagents/envs/communicator_objects/unity_to_external.proto b/protobuf-definitions/proto/mlagents/envs/communicator_objects/unity_to_external.proto index f323788b6d..261bac6671 100644 --- a/protobuf-definitions/proto/mlagents/envs/communicator_objects/unity_to_external.proto +++ b/protobuf-definitions/proto/mlagents/envs/communicator_objects/unity_to_external.proto @@ -5,8 +5,8 @@ import "mlagents/envs/communicator_objects/unity_message.proto"; option csharp_namespace = "MLAgents.CommunicatorObjects"; package communicator_objects; -service UnityToExternal { +service UnityToExternalProto { // Sends the academy parameters - rpc Exchange(UnityMessage) returns (UnityMessage) {} + rpc Exchange(UnityMessageProto) returns (UnityMessageProto) {} } diff --git a/run-standalone-build-osx.sh b/run-standalone-build-osx.sh new file mode 100755 index 0000000000..988ba571ec --- /dev/null +++ b/run-standalone-build-osx.sh @@ -0,0 +1,45 @@ +#!/bin/bash + +set -eo pipefail + +if [[ -z "${UNITY_VERSION}" ]]; then + + echo "Environment Variable UNITY_VERSION was not set" + exit 1 + +else + BOKKEN_UNITY="/Users/bokken/${UNITY_VERSION}/Unity.app/Contents/MacOS/Unity" + HUB_UNITY="/Applications/Unity/Hub/Editor/${UNITY_VERSION}/Unity.app/Contents/MacOS/Unity" + + if [[ -f ${BOKKEN_UNITY} ]]; then + UNITY=${BOKKEN_UNITY} + else + UNITY=${HUB_UNITY} + fi + + pushd $(dirname "${0}") > /dev/null + BASETPATH=$(pwd -L) + popd > /dev/null + + echo "Cleaning previous results" + + echo "Starting tests 
via $UNITY" + + CMD_LINE="$UNITY -projectPath $BASETPATH/UnitySDK -logfile - -batchmode -executeMethod MLAgents.StandaloneBuildTest.BuildStandalonePlayerOSX" + + echo "$CMD_LINE ..." + + ${CMD_LINE} + RES=$? + + if [[ "${RES}" -eq "0" ]]; then + echo "Standalone build completed successfully."; + exit 0; + else + echo "Standalone build failed." + exit 1; + fi + + exit ${RES} + +fi diff --git a/run-tests-editmode-osx-editor.sh b/run-tests-editmode-osx-editor.sh new file mode 100755 index 0000000000..6d6c1ae42d --- /dev/null +++ b/run-tests-editmode-osx-editor.sh @@ -0,0 +1,57 @@ +#!/bin/bash + +set -eo pipefail + +if [[ -z "${UNITY_VERSION}" ]]; then + + echo "Environment Variable UNITY_VERSION was not set" + exit 1 + +else + BOKKEN_UNITY="/Users/bokken/${UNITY_VERSION}/Unity.app/Contents/MacOS/Unity" + HUB_UNITY="/Applications/Unity/Hub/Editor/${UNITY_VERSION}/Unity.app/Contents/MacOS/Unity" + + if [[ -f ${BOKKEN_UNITY} ]]; then + UNITY=${BOKKEN_UNITY} + else + UNITY=${HUB_UNITY} + fi + + pushd $(dirname "${0}") > /dev/null + BASETPATH=$(pwd -L) + popd > /dev/null + + echo "Cleaning previous results" + + if [[ -e ${BASETPATH}/results.xml ]] + then + rm ${BASETPATH}/results.xml + fi + + echo "Starting tests via $UNITY" + + CMD_LINE="$UNITY -runTests -logfile - -projectPath $BASETPATH/UnitySDK -testResults $BASETPATH/results.xml -testPlatform editmode" + + echo "$CMD_LINE ..." + + $CMD_LINE + RES=$? + + TOTAL=$(echo 'cat /test-run/test-suite/@total' | xmllint --shell results.xml | awk -F'[="]' '!/>/{print $(NF-1)}') + PASSED=$(echo 'cat /test-run/test-suite/@passed' | xmllint --shell results.xml | awk -F'[="]' '!/>/{print $(NF-1)}') + FAILED=$(echo 'cat /test-run/test-suite/@failed' | xmllint --shell results.xml | awk -F'[="]' '!/>/{print $(NF-1)}') + DURATION=$(echo 'cat /test-run/test-suite/@duration' | xmllint --shell results.xml | awk -F'[="]' '!/>/{print $(NF-1)}') + + echo "$TOTAL tests executed in ${DURATION}s: $PASSED passed, $FAILED failed. 
More details in results.xml"
+    if [[ ${RES} -eq 0 ]] && [[ -e ${BASETPATH}/results.xml ]]; then
+        echo "Test run SUCCEEDED!"
+    else
+        echo "Test run FAILED!"
+    fi
+
+    rm "${BASETPATH}/results.xml"
+
+    exit ${RES}
+
+fi
\ No newline at end of file
diff --git a/test_constraints_max_version.txt b/test_constraints_max_version.txt
index a4326315e6..9d8f6832b9 100644
--- a/test_constraints_max_version.txt
+++ b/test_constraints_max_version.txt
@@ -2,4 +2,4 @@
 # For projects with upper bounds, we should periodically update this list to the latest release version
 grpcio>=1.23.0
 numpy>=1.17.2
-tensorflow>=1.14.0
+tensorflow>=1.14.0,<2.0
diff --git a/utils/validate_versions.py b/utils/validate_versions.py
new file mode 100755
index 0000000000..118779260f
--- /dev/null
+++ b/utils/validate_versions.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+
+import os
+import sys
+from typing import Dict, Optional
+
+VERSION_LINE_START = "VERSION = "
+
+DIRECTORIES = ["ml-agents", "ml-agents-envs", "gym-unity"]
+
+
+def extract_version_string(filename: str) -> Optional[str]:
+    """Return the VERSION string declared in *filename*, or None if absent."""
+    with open(filename) as f:
+        for line in f:
+            if line.startswith(VERSION_LINE_START):
+                return line.replace(VERSION_LINE_START, "").strip()
+    return None
+
+
+def check_versions() -> bool:
+    """Check that every package's setup.py declares the same, non-None VERSION."""
+    version_by_dir: Dict[str, Optional[str]] = {}
+    for directory in DIRECTORIES:
+        path = os.path.join(directory, "setup.py")
+        version = extract_version_string(path)
+        print(f"Found version {version} for {directory}")
+        version_by_dir[directory] = version
+
+    # Make sure we have exactly one version, and it's not none
+    versions = set(version_by_dir.values())
+    if len(versions) != 1 or None in versions:
+        print("Each setup.py must have the same VERSION string.")
+        return False
+    return True
+
+
+if __name__ == "__main__":
+    ok = check_versions()
+    return_code = 0 if ok else 1
+    sys.exit(return_code)